Compare commits
10 Commits
8ff26b3816
...
7d7cd14233
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7d7cd14233 | ||
|
|
93d20b230f | ||
|
|
93eb07d7d7 | ||
|
|
91e7d6a802 | ||
|
|
3e0f96dc4d | ||
|
|
937019b9a1 | ||
|
|
0bdef113e7 | ||
|
|
3d5400da67 | ||
|
|
71c5fd5995 | ||
|
|
6ee54c8399 |
2
Makefile
2
Makefile
@ -9,7 +9,7 @@ bootstrap:
|
|||||||
test:
|
test:
|
||||||
python3 -m unittest discover -s tests -p 'test_*.py'
|
python3 -m unittest discover -s tests -p 'test_*.py'
|
||||||
pnpm --filter api test
|
pnpm --filter api test
|
||||||
pnpm --filter web test src/features/assets/assets-page.test.tsx src/features/workflows/workflow-editor-page.test.tsx src/features/explore/explore-page.test.tsx src/runtime/workflow-editor-state.test.ts src/runtime/i18n.test.ts
|
pnpm --filter web test src/features/assets/assets-page.test.tsx src/features/workflows/workflow-editor-page.test.tsx src/features/explore/explore-page.test.tsx src/runtime/workflow-editor-state.test.ts src/runtime/i18n.test.ts src/runtime/custom-node-presenter.test.ts
|
||||||
pnpm --filter web build
|
pnpm --filter web build
|
||||||
pnpm --filter worker test
|
pnpm --filter worker test
|
||||||
|
|
||||||
|
|||||||
30
README.md
30
README.md
@ -2,6 +2,17 @@
|
|||||||
|
|
||||||
EmboFlow is a B/S embodied-data workflow platform for raw asset ingestion, delivery normalization, dataset transformation, workflow execution, preview, and export.
|
EmboFlow is a B/S embodied-data workflow platform for raw asset ingestion, delivery normalization, dataset transformation, workflow execution, preview, and export.
|
||||||
|
|
||||||
|
## Current V1 Features
|
||||||
|
|
||||||
|
- Project-scoped workspace shell with a dedicated Projects page and active project selector in the header
|
||||||
|
- Asset workspace that supports local asset registration, probe summaries, storage connection management, and dataset creation
|
||||||
|
- Project-scoped custom node registry with Docker image and Dockerfile based node definitions
|
||||||
|
- Workflow templates as first-class objects, including default project templates and creating project workflows from a template
|
||||||
|
- Blank workflow creation and a large React Flow editor with drag-and-drop nodes, free canvas movement, edge validation, Docker-first node runtime presets, and Python code-hook injection
|
||||||
|
- Workflow-level `Save As Template` so edited graphs can be promoted into reusable project templates
|
||||||
|
- Mongo-backed run orchestration, worker execution, run history, task detail, logs, stdout/stderr, artifacts, cancel, retry, and task retry
|
||||||
|
- Runtime shell level Chinese and English switching
|
||||||
|
|
||||||
## Bootstrap
|
## Bootstrap
|
||||||
|
|
||||||
From the repository root:
|
From the repository root:
|
||||||
@ -65,14 +76,25 @@ The local validation path currently used for embodied data testing is:
|
|||||||
```
|
```
|
||||||
|
|
||||||
You can register that directory from the Assets page or via `POST /api/assets/register`.
|
You can register that directory from the Assets page or via `POST /api/assets/register`.
|
||||||
The workflow editor currently requires selecting at least one registered asset before a run can be created.
|
The workflow editor now supports workflow input bindings for both registered assets and project datasets. Dataset bindings are expanded into runnable asset ids during preflight and run creation, and run detail shows `input sources`, `input assets`, and `input datasets` separately.
|
||||||
The editor now also persists per-node runtime config in workflow versions, including executor overrides, optional artifact title overrides, and Python code-hook source for inspect and transform style nodes.
|
The editor now also persists per-node runtime config in workflow versions, including executor overrides, optional artifact title overrides, and Python code-hook source for inspect and transform style nodes.
|
||||||
The runtime web shell now exposes a visible `中文 / English` language toggle. The core workspace shell and workflow authoring surface are translated through a lightweight i18n layer.
|
The runtime web shell now exposes a visible `中文 / English` language toggle. The core workspace shell and workflow authoring surface are translated through a lightweight i18n layer.
|
||||||
The workflow editor center panel now uses a real draggable node canvas with zoom, pan, mini-map, dotted background, handle-based edge creation, and persisted node positions instead of a static list of node cards.
|
The shell now also exposes a dedicated Projects page plus an active project selector, so assets, datasets, workflow templates, workflows, and runs all switch together at the project boundary.
|
||||||
The Runs workspace now shows project-scoped run history, run-level aggregated summaries, cancel/retry controls, and run detail views with persisted task summaries, stdout/stderr sections, result previews, and artifact links into Explore.
|
The Assets workspace now includes first-class storage connections and datasets. A dataset is distinct from a raw asset and binds project source assets to a selected local or object-storage-backed destination.
|
||||||
|
The shell now also exposes a dedicated Nodes page for project-scoped custom container nodes. Custom nodes can be registered from an existing Docker image or a self-contained Dockerfile, and each node declares whether it consumes a single asset set or multiple upstream asset sets plus what kind of output it produces.
|
||||||
|
The Workflows workspace now includes a template gallery. Projects can start from default or saved templates, or create a blank workflow directly.
|
||||||
|
The workflow editor center panel now uses a real draggable node canvas with zoom, pan, mini-map, dotted background, handle-based edge creation, persisted node positions, and localized validation feedback instead of a static list of node cards.
|
||||||
|
The workflow editor right panel now also supports saving the current workflow draft as a reusable workflow template, in addition to editing per-node runtime settings and Python hooks.
|
||||||
|
When a custom node is selected on the canvas, the right panel now also exposes its declared input contract, output contract, artifact type, and container source so the operator can confirm compatibility without leaving the editor.
|
||||||
|
The workflow editor now also exposes a workflow-level preflight panel. Saved workflow versions can be checked against the selected asset or dataset binding before execution, and run creation is blocked when the current version still has graph, executor, or input-binding errors.
|
||||||
|
The node library now supports both click-to-append and drag-and-drop placement into the canvas. When a node is inserted from the library, the editor now seeds its default runtime contract directly into the workflow draft, so custom Docker nodes keep their declared executor type and I/O contract without extra manual edits. V1 connection rules block self-edges, duplicate edges, cycles, incoming edges into source nodes, outgoing edges from export nodes, and multiple upstream edges into ordinary nodes, while allowing multi-input set nodes such as `union-assets`, `intersect-assets`, and `difference-assets` plus any custom node whose runtime contract declares `inputMode=multi_asset_set`.
|
||||||
|
The Runs workspace now shows project-scoped run history, run-level aggregated summaries, cancel/retry controls, and run detail views with persisted task summaries, stdout/stderr sections, result previews, artifact links into Explore, plus explicit input-source visibility for both assets and datasets.
|
||||||
Selected run tasks now expose the frozen node definition id, executor config snapshot, and code-hook metadata that were captured when the run was created.
|
Selected run tasks now expose the frozen node definition id, executor config snapshot, and code-hook metadata that were captured when the run was created.
|
||||||
When a node uses `executorType=docker` and provides `executorConfig.image`, the worker now runs a real local Docker container with mounted `input.json` / `output.json` exchange files. If no image is configured, the executor falls back to the lightweight simulated behavior used by older demo tasks.
|
Most built-in delivery nodes now default to `executorType=docker`. When a node uses `executorType=docker` and provides `executorConfig.image`, the worker runs a real local Docker container with mounted `input.json` / `output.json` exchange files plus read-only mounts for bound asset paths. If no image is configured, the executor falls back to the lightweight simulated behavior used by older demo tasks.
|
||||||
|
The Docker runner now treats missing or `null` `codeHookSpec` values as “no hook configured”, so built-in Docker nodes and custom container nodes can share the same task envelope without crashing on optional hook fields.
|
||||||
|
Custom Docker nodes follow the same runtime contract. The container reads the task snapshot and execution context from `EMBOFLOW_INPUT_PATH`, writes `{\"result\": ...}` JSON to `EMBOFLOW_OUTPUT_PATH`, and if it declares an asset-set output contract it must return `result.assetIds` as a string array. Dockerfile-based custom nodes are built locally on first execution and then reused by tag. The Nodes page and API now share the same validation rules, including required names, valid source kinds, a mandatory `FROM` instruction for Dockerfiles, and rejection of `Source` category nodes that incorrectly declare `inputMode=multi_asset_set`. The editor also renders the standard EmboFlow input and output envelope preview for custom nodes so users can align container code to the actual runtime JSON shape.
|
||||||
When a node uses the built-in Python path without a custom hook, `source-asset` now emits bound asset metadata from Mongo-backed asset records and `validate-structure` now performs a real directory validation pass against local source paths. On the current sample path `/Users/longtaowu/workspace/emboldata/data`, that validation reports `valid=false`, `videoFileCount=407`, and missing delivery files because the sample root is a mixed dataset collection rather than a delivery package.
|
When a node uses the built-in Python path without a custom hook, `source-asset` now emits bound asset metadata from Mongo-backed asset records and `validate-structure` now performs a real directory validation pass against local source paths. On the current sample path `/Users/longtaowu/workspace/emboldata/data`, that validation reports `valid=false`, `videoFileCount=407`, and missing delivery files because the sample root is a mixed dataset collection rather than a delivery package.
|
||||||
|
The worker now also carries direct upstream task results into execution context so set-operation utility nodes can compute narrowed asset sets and pass those effective asset ids to downstream tasks.
|
||||||
|
|
||||||
## Repository Structure
|
## Repository Structure
|
||||||
|
|
||||||
|
|||||||
@ -13,7 +13,9 @@ export const runTaskSchemaDefinition = {
|
|||||||
artifactTitle: { type: "string", required: false, default: null },
|
artifactTitle: { type: "string", required: false, default: null },
|
||||||
status: { type: "string", required: true },
|
status: { type: "string", required: true },
|
||||||
attempt: { type: "number", required: true, default: 1 },
|
attempt: { type: "number", required: true, default: 1 },
|
||||||
|
inputBindings: { type: "array", required: true, default: [] },
|
||||||
assetIds: { type: "array", required: true, default: [] },
|
assetIds: { type: "array", required: true, default: [] },
|
||||||
|
datasetIds: { type: "array", required: true, default: [] },
|
||||||
upstreamNodeIds: { type: "array", required: true, default: [] },
|
upstreamNodeIds: { type: "array", required: true, default: [] },
|
||||||
outputArtifactIds: { type: "array", required: true, default: [] },
|
outputArtifactIds: { type: "array", required: true, default: [] },
|
||||||
logLines: { type: "array", required: true, default: [] },
|
logLines: { type: "array", required: true, default: [] },
|
||||||
|
|||||||
@ -5,7 +5,9 @@ export const workflowRunSchemaDefinition = {
|
|||||||
workflowVersionId: { type: "string", required: true },
|
workflowVersionId: { type: "string", required: true },
|
||||||
status: { type: "string", required: true },
|
status: { type: "string", required: true },
|
||||||
triggeredBy: { type: "string", required: true },
|
triggeredBy: { type: "string", required: true },
|
||||||
|
inputBindings: { type: "array", required: true, default: [] },
|
||||||
assetIds: { type: "array", required: true, default: [] },
|
assetIds: { type: "array", required: true, default: [] },
|
||||||
|
datasetIds: { type: "array", required: true, default: [] },
|
||||||
runtimeSnapshot: { type: "object", required: false, default: null },
|
runtimeSnapshot: { type: "object", required: false, default: null },
|
||||||
summary: { type: "object", required: false, default: null },
|
summary: { type: "object", required: false, default: null },
|
||||||
startedAt: { type: "date", required: false, default: null },
|
startedAt: { type: "date", required: false, default: null },
|
||||||
|
|||||||
@ -1,38 +1,128 @@
|
|||||||
export const DELIVERY_NODE_DEFINITIONS = [
|
import type { ExecutorType, NodeRuntimeConfig } from "../../../../worker/src/contracts/execution-context.ts";
|
||||||
|
|
||||||
|
export type DeliveryNodeDefinition = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
category: "Source" | "Transform" | "Inspect" | "Annotate" | "Export" | "Utility";
|
||||||
|
description: string;
|
||||||
|
defaultExecutorType?: ExecutorType;
|
||||||
|
defaultExecutorConfig?: Record<string, unknown>;
|
||||||
|
supportsCodeHook?: boolean;
|
||||||
|
allowsMultipleIncoming?: boolean;
|
||||||
|
};
|
||||||
|
|
||||||
|
const DEFAULT_DOCKER_EXECUTOR_CONFIG = {
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
networkMode: "none",
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
function createDockerDefaults(): Pick<DeliveryNodeDefinition, "defaultExecutorType" | "defaultExecutorConfig"> {
|
||||||
|
return {
|
||||||
|
defaultExecutorType: "docker",
|
||||||
|
defaultExecutorConfig: { ...DEFAULT_DOCKER_EXECUTOR_CONFIG },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DELIVERY_NODE_DEFINITIONS: readonly DeliveryNodeDefinition[] = [
|
||||||
{
|
{
|
||||||
id: "source-asset",
|
id: "source-asset",
|
||||||
name: "Source Asset",
|
name: "Source Asset",
|
||||||
category: "Source",
|
category: "Source",
|
||||||
description: "Load the uploaded asset or registered path into the workflow.",
|
description: "Load the uploaded asset or registered path into the workflow.",
|
||||||
|
...createDockerDefaults(),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "extract-archive",
|
id: "extract-archive",
|
||||||
name: "Extract Archive",
|
name: "Extract Archive",
|
||||||
category: "Transform",
|
category: "Transform",
|
||||||
description: "Unpack tar, zip, or zst archives for downstream processing.",
|
description: "Unpack tar, zip, or zst archives for downstream processing.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "rename-folder",
|
id: "rename-folder",
|
||||||
name: "Rename Delivery Folder",
|
name: "Rename Delivery Folder",
|
||||||
category: "Transform",
|
category: "Transform",
|
||||||
description: "Rename the top-level folder to the delivery naming convention.",
|
description: "Rename the top-level folder to the delivery naming convention.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "validate-structure",
|
id: "validate-structure",
|
||||||
name: "Validate Structure",
|
name: "Validate Structure",
|
||||||
category: "Inspect",
|
category: "Inspect",
|
||||||
description: "Check required directories and metadata files.",
|
description: "Check required directories and metadata files.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "validate-metadata",
|
id: "validate-metadata",
|
||||||
name: "Validate Metadata",
|
name: "Validate Metadata",
|
||||||
category: "Inspect",
|
category: "Inspect",
|
||||||
description: "Validate meta.json, intrinsics.json, and video_meta.json.",
|
description: "Validate meta.json, intrinsics.json, and video_meta.json.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "union-assets",
|
||||||
|
name: "Union Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Merge multiple upstream asset sets into one deduplicated asset set.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
|
allowsMultipleIncoming: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "intersect-assets",
|
||||||
|
name: "Intersect Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Keep only the assets that exist in every upstream asset set.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
|
allowsMultipleIncoming: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "difference-assets",
|
||||||
|
name: "Difference Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Subtract downstream asset sets from the first upstream asset set.",
|
||||||
|
...createDockerDefaults(),
|
||||||
|
supportsCodeHook: true,
|
||||||
|
allowsMultipleIncoming: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "export-delivery-package",
|
id: "export-delivery-package",
|
||||||
name: "Export Delivery Package",
|
name: "Export Delivery Package",
|
||||||
category: "Export",
|
category: "Export",
|
||||||
description: "Publish the normalized package for downstream upload or handoff.",
|
description: "Publish the normalized package for downstream upload or handoff.",
|
||||||
|
...createDockerDefaults(),
|
||||||
},
|
},
|
||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
|
const DELIVERY_NODE_DEFINITION_BY_ID = new Map(
|
||||||
|
DELIVERY_NODE_DEFINITIONS.map((definition) => [definition.id, definition]),
|
||||||
|
);
|
||||||
|
|
||||||
|
export function getDeliveryNodeDefinition(definitionId: string) {
|
||||||
|
return DELIVERY_NODE_DEFINITION_BY_ID.get(definitionId);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildDefaultNodeRuntimeConfig(definitionId: string): NodeRuntimeConfig | undefined {
|
||||||
|
const definition = getDeliveryNodeDefinition(definitionId);
|
||||||
|
if (!definition) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const config: NodeRuntimeConfig = {};
|
||||||
|
if (definition.defaultExecutorType) {
|
||||||
|
config.executorType = definition.defaultExecutorType;
|
||||||
|
}
|
||||||
|
if (definition.defaultExecutorConfig) {
|
||||||
|
config.executorConfig = { ...definition.defaultExecutorConfig };
|
||||||
|
}
|
||||||
|
return Object.keys(config).length > 0 ? config : undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function deliveryNodeAllowsMultipleIncoming(definitionId: string) {
|
||||||
|
return Boolean(getDeliveryNodeDefinition(definitionId)?.allowsMultipleIncoming);
|
||||||
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@ -94,6 +94,34 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
app.post("/api/storage-connections", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.createStorageConnection({
|
||||||
|
workspaceId: request.body.workspaceId,
|
||||||
|
name: request.body.name,
|
||||||
|
provider: request.body.provider,
|
||||||
|
bucket: request.body.bucket,
|
||||||
|
endpoint: request.body.endpoint,
|
||||||
|
region: request.body.region,
|
||||||
|
basePath: request.body.basePath,
|
||||||
|
rootPath: request.body.rootPath,
|
||||||
|
createdBy: request.body.createdBy ?? "local-user",
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/storage-connections", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(await store.listStorageConnections(String(request.query.workspaceId)));
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
app.post("/api/assets/register", async (request, response, next) => {
|
app.post("/api/assets/register", async (request, response, next) => {
|
||||||
try {
|
try {
|
||||||
const sourcePath = request.body.sourcePath as string | undefined;
|
const sourcePath = request.body.sourcePath as string | undefined;
|
||||||
@ -158,8 +186,153 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
app.get("/api/node-definitions", (_request, response) => {
|
app.post("/api/datasets", async (request, response, next) => {
|
||||||
response.json(store.listNodeDefinitions());
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.createDataset({
|
||||||
|
workspaceId: request.body.workspaceId,
|
||||||
|
projectId: request.body.projectId,
|
||||||
|
name: request.body.name,
|
||||||
|
description: request.body.description,
|
||||||
|
sourceAssetIds: request.body.sourceAssetIds ?? [],
|
||||||
|
storageConnectionId: request.body.storageConnectionId,
|
||||||
|
storagePath: request.body.storagePath,
|
||||||
|
createdBy: request.body.createdBy ?? "local-user",
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/datasets", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(await store.listDatasets(String(request.query.projectId)));
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/datasets/:datasetId", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
const dataset = await store.getDataset(request.params.datasetId);
|
||||||
|
if (!dataset) {
|
||||||
|
response.status(404).json({ message: "dataset not found" });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
response.json(dataset);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/datasets/:datasetId/versions", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(await store.listDatasetVersions(request.params.datasetId));
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.post("/api/custom-nodes", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.createCustomNode({
|
||||||
|
workspaceId: request.body.workspaceId,
|
||||||
|
projectId: request.body.projectId,
|
||||||
|
name: request.body.name,
|
||||||
|
description: request.body.description,
|
||||||
|
category: request.body.category,
|
||||||
|
source: request.body.source,
|
||||||
|
contract: request.body.contract,
|
||||||
|
createdBy: request.body.createdBy ?? "local-user",
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/custom-nodes", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(await store.listCustomNodes(String(request.query.projectId)));
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/node-definitions", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.listNodeDefinitions(
|
||||||
|
request.query.projectId ? String(request.query.projectId) : undefined,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.post("/api/workflow-templates", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.createWorkflowTemplate({
|
||||||
|
workspaceId: request.body.workspaceId,
|
||||||
|
projectId: request.body.projectId,
|
||||||
|
name: request.body.name,
|
||||||
|
description: request.body.description,
|
||||||
|
visualGraph: request.body.visualGraph ?? {},
|
||||||
|
logicGraph: request.body.logicGraph,
|
||||||
|
runtimeGraph: request.body.runtimeGraph ?? {},
|
||||||
|
pluginRefs: request.body.pluginRefs ?? [],
|
||||||
|
createdBy: request.body.createdBy ?? "local-user",
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/workflow-templates", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.listWorkflowTemplates({
|
||||||
|
workspaceId: String(request.query.workspaceId),
|
||||||
|
projectId: request.query.projectId ? String(request.query.projectId) : undefined,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.get("/api/workflow-templates/:templateId", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
const template = await store.getWorkflowTemplate(request.params.templateId);
|
||||||
|
if (!template) {
|
||||||
|
response.status(404).json({ message: "workflow template not found" });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
response.json(template);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
app.post("/api/workflow-templates/:templateId/workflows", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.createWorkflowFromTemplate({
|
||||||
|
templateId: request.params.templateId,
|
||||||
|
workspaceId: request.body.workspaceId,
|
||||||
|
projectId: request.body.projectId,
|
||||||
|
name: request.body.name,
|
||||||
|
createdBy: request.body.createdBy ?? "local-user",
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
app.post("/api/workflows", async (request, response, next) => {
|
app.post("/api/workflows", async (request, response, next) => {
|
||||||
@ -223,6 +396,22 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
app.post("/api/runs/preflight", async (request, response, next) => {
|
||||||
|
try {
|
||||||
|
response.json(
|
||||||
|
await store.preflightRun({
|
||||||
|
workflowDefinitionId: request.body.workflowDefinitionId,
|
||||||
|
workflowVersionId: request.body.workflowVersionId,
|
||||||
|
inputBindings: request.body.inputBindings ?? [],
|
||||||
|
assetIds: request.body.assetIds ?? [],
|
||||||
|
datasetIds: request.body.datasetIds ?? [],
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
next(error);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
app.post("/api/runs", async (request, response, next) => {
|
app.post("/api/runs", async (request, response, next) => {
|
||||||
try {
|
try {
|
||||||
response.json(
|
response.json(
|
||||||
@ -230,7 +419,9 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
|||||||
workflowDefinitionId: request.body.workflowDefinitionId,
|
workflowDefinitionId: request.body.workflowDefinitionId,
|
||||||
workflowVersionId: request.body.workflowVersionId,
|
workflowVersionId: request.body.workflowVersionId,
|
||||||
triggeredBy: request.body.triggeredBy ?? "local-user",
|
triggeredBy: request.body.triggeredBy ?? "local-user",
|
||||||
|
inputBindings: request.body.inputBindings ?? [],
|
||||||
assetIds: request.body.assetIds ?? [],
|
assetIds: request.body.assetIds ?? [],
|
||||||
|
datasetIds: request.body.datasetIds ?? [],
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@ -6,6 +6,15 @@ import {
|
|||||||
WORKFLOW_RUN_STATUSES,
|
WORKFLOW_RUN_STATUSES,
|
||||||
WORKSPACE_TYPES,
|
WORKSPACE_TYPES,
|
||||||
} from "../../../packages/contracts/src/domain.ts";
|
} from "../../../packages/contracts/src/domain.ts";
|
||||||
|
import {
|
||||||
|
buildCustomNodeEnvelopePreview,
|
||||||
|
formatCustomNodeValidationIssue,
|
||||||
|
validateCustomNodeDefinition,
|
||||||
|
} from "../../../packages/contracts/src/custom-node.ts";
|
||||||
|
import {
|
||||||
|
normalizeWorkflowInputBindings,
|
||||||
|
splitWorkflowInputBindings,
|
||||||
|
} from "../../../packages/contracts/src/workflow-input.ts";
|
||||||
import { createMongoConnectionUri } from "../src/common/mongo/mongo.module.ts";
|
import { createMongoConnectionUri } from "../src/common/mongo/mongo.module.ts";
|
||||||
import {
|
import {
|
||||||
ASSET_COLLECTION_NAME,
|
ASSET_COLLECTION_NAME,
|
||||||
@ -50,3 +59,91 @@ test("schema collection names match the core domain objects", () => {
|
|||||||
assert.equal(ASSET_COLLECTION_NAME, "assets");
|
assert.equal(ASSET_COLLECTION_NAME, "assets");
|
||||||
assert.equal(WORKFLOW_DEFINITION_COLLECTION_NAME, "workflow_definitions");
|
assert.equal(WORKFLOW_DEFINITION_COLLECTION_NAME, "workflow_definitions");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("custom node validation accepts a valid docker image utility node", () => {
|
||||||
|
const issues = validateCustomNodeDefinition({
|
||||||
|
name: "Merge Assets",
|
||||||
|
category: "Utility",
|
||||||
|
source: {
|
||||||
|
kind: "image",
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
command: ["python3", "-c", "print('merge')"],
|
||||||
|
},
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(issues, []);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("custom node validation rejects invalid dockerfile and impossible source contract combinations", () => {
|
||||||
|
const issues = validateCustomNodeDefinition({
|
||||||
|
name: "Bad Source",
|
||||||
|
category: "Source",
|
||||||
|
source: {
|
||||||
|
kind: "dockerfile",
|
||||||
|
dockerfileContent: "CMD [\"python3\"]",
|
||||||
|
},
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "report",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(issues, ["source_cannot_be_multi_input", "dockerfile_missing_from"]);
|
||||||
|
assert.equal(
|
||||||
|
formatCustomNodeValidationIssue("dockerfile_missing_from"),
|
||||||
|
"custom node dockerfile must include a FROM instruction",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("custom node envelope preview reflects the declared input and output contract", () => {
|
||||||
|
const preview = buildCustomNodeEnvelopePreview({
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set_with_report",
|
||||||
|
artifactType: "json",
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(preview.input.context.assetIds, ["asset-123"]);
|
||||||
|
assert.deepEqual(preview.input.context.upstreamResults[0]?.result?.assetIds, ["asset-123"]);
|
||||||
|
assert.deepEqual(preview.output.result.assetIds, ["asset-123"]);
|
||||||
|
assert.equal(preview.output.result.artifactType, "json");
|
||||||
|
assert.equal(typeof preview.output.result.report, "object");
|
||||||
|
});
|
||||||
|
|
||||||
|
test("workflow input bindings normalize legacy asset and dataset inputs into one contract", () => {
|
||||||
|
const bindings = normalizeWorkflowInputBindings({
|
||||||
|
assetIds: ["asset-1", "asset-1", ""],
|
||||||
|
datasetIds: ["dataset-1", "dataset-2", "dataset-1"],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(bindings, [
|
||||||
|
{ kind: "asset", id: "asset-1" },
|
||||||
|
{ kind: "dataset", id: "dataset-1" },
|
||||||
|
{ kind: "dataset", id: "dataset-2" },
|
||||||
|
]);
|
||||||
|
assert.deepEqual(splitWorkflowInputBindings(bindings), {
|
||||||
|
assetIds: ["asset-1"],
|
||||||
|
datasetIds: ["dataset-1", "dataset-2"],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test("workflow input bindings prefer explicit bindings over legacy fallback arrays", () => {
|
||||||
|
const bindings = normalizeWorkflowInputBindings({
|
||||||
|
inputBindings: [
|
||||||
|
{ kind: "dataset", id: "dataset-9" },
|
||||||
|
{ kind: "dataset", id: "dataset-9" },
|
||||||
|
{ kind: "asset", id: "asset-2" },
|
||||||
|
],
|
||||||
|
assetIds: ["asset-legacy"],
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(bindings, [
|
||||||
|
{ kind: "dataset", id: "dataset-9" },
|
||||||
|
{ kind: "asset", id: "asset-2" },
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|||||||
@ -96,6 +96,67 @@ test("mongo-backed runtime reuses bootstrapped workspace and project across rest
|
|||||||
assert.equal(projects[0]?._id, bootstrap.project._id);
|
assert.equal(projects[0]?._id, bootstrap.project._id);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime provisions a default workflow template for newly created projects", async (t) => {
|
||||||
|
const mongod = await MongoMemoryServer.create({
|
||||||
|
instance: {
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
port: 27217,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-project-template",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "project-template-user", projectName: "Seed Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const project = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/projects`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
name: "Second Project",
|
||||||
|
description: "Project created after bootstrap",
|
||||||
|
createdBy: "project-template-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const templates = await readJson<Array<{ slug: string; projectId?: string }>>(
|
||||||
|
await fetch(
|
||||||
|
`${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(project._id)}`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
templates.some((template) => template.projectId === project._id),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
templates.some((template) => template.slug === "delivery-normalization-template"),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
test("mongo-backed runtime persists probed assets and workflow runs through the HTTP API", async (t) => {
|
test("mongo-backed runtime persists probed assets and workflow runs through the HTTP API", async (t) => {
|
||||||
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-"));
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-"));
|
||||||
await mkdir(path.join(sourceDir, "DJI_001"));
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||||
@ -225,7 +286,7 @@ test("mongo-backed runtime persists probed assets and workflow runs through the
|
|||||||
assert.deepEqual((run as { assetIds?: string[] }).assetIds, [asset._id]);
|
assert.deepEqual((run as { assetIds?: string[] }).assetIds, [asset._id]);
|
||||||
assert.equal(tasks.length, 3);
|
assert.equal(tasks.length, 3);
|
||||||
assert.equal(tasks[0]?.nodeId, "source-asset");
|
assert.equal(tasks[0]?.nodeId, "source-asset");
|
||||||
assert.equal(tasks[0]?.executorType, "python");
|
assert.equal(tasks[0]?.executorType, "docker");
|
||||||
assert.deepEqual(tasks[0]?.assetIds, [asset._id]);
|
assert.deepEqual(tasks[0]?.assetIds, [asset._id]);
|
||||||
assert.deepEqual(tasks[0]?.upstreamNodeIds, []);
|
assert.deepEqual(tasks[0]?.upstreamNodeIds, []);
|
||||||
assert.equal(tasks[0]?.status, "queued");
|
assert.equal(tasks[0]?.status, "queued");
|
||||||
@ -482,6 +543,169 @@ test("mongo-backed runtime rejects workflow runs without bound assets", async (t
|
|||||||
assert.match(await response.text(), /assetIds/i);
|
assert.match(await response.text(), /assetIds/i);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime accepts dataset bindings and resolves them into run assets", async (t) => {
|
||||||
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-dataset-inputs-"));
|
||||||
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||||
|
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
||||||
|
|
||||||
|
const mongod = await MongoMemoryServer.create({
|
||||||
|
instance: {
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
port: 27129,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-dataset-inputs",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
project: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "dataset-run-user", projectName: "Dataset Run Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const storageConnection = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/storage-connections`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
name: "Local Dataset Storage",
|
||||||
|
provider: "local",
|
||||||
|
rootPath: sourceDir,
|
||||||
|
createdBy: "dataset-run-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const asset = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/assets/register`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
sourcePath: sourceDir,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const dataset = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/datasets`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Customer Delivery Dataset",
|
||||||
|
sourceAssetIds: [asset._id],
|
||||||
|
storageConnectionId: storageConnection._id,
|
||||||
|
storagePath: "datasets/customer-delivery",
|
||||||
|
createdBy: "dataset-run-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const workflow = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflows`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Dataset Bound Workflow",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const version = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-asset", type: "source" },
|
||||||
|
{ id: "validate-structure", type: "inspect" },
|
||||||
|
],
|
||||||
|
edges: [{ from: "source-asset", to: "validate-structure" }],
|
||||||
|
},
|
||||||
|
runtimeGraph: { selectedPreset: "delivery-normalization" },
|
||||||
|
pluginRefs: ["builtin:delivery-nodes"],
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const preflight = await readJson<{
|
||||||
|
ok: boolean;
|
||||||
|
issues: Array<{ code: string }>;
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/runs/preflight`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workflowDefinitionId: workflow._id,
|
||||||
|
workflowVersionId: version._id,
|
||||||
|
inputBindings: [{ kind: "dataset", id: dataset._id }],
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const run = await readJson<{
|
||||||
|
_id: string;
|
||||||
|
assetIds: string[];
|
||||||
|
datasetIds: string[];
|
||||||
|
inputBindings: Array<{ kind: string; id: string }>;
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/runs`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workflowDefinitionId: workflow._id,
|
||||||
|
workflowVersionId: version._id,
|
||||||
|
inputBindings: [{ kind: "dataset", id: dataset._id }],
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const tasks = await readJson<
|
||||||
|
Array<{
|
||||||
|
nodeId: string;
|
||||||
|
assetIds: string[];
|
||||||
|
datasetIds: string[];
|
||||||
|
}>
|
||||||
|
>(await fetch(`${server.baseUrl}/api/runs/${run._id}/tasks`));
|
||||||
|
|
||||||
|
assert.equal(preflight.ok, true);
|
||||||
|
assert.deepEqual(preflight.issues, []);
|
||||||
|
assert.deepEqual(run.inputBindings, [{ kind: "dataset", id: dataset._id }]);
|
||||||
|
assert.deepEqual(run.datasetIds, [dataset._id]);
|
||||||
|
assert.deepEqual(run.assetIds, [asset._id]);
|
||||||
|
assert.deepEqual(tasks[0]?.datasetIds, [dataset._id]);
|
||||||
|
assert.deepEqual(tasks[0]?.assetIds, [asset._id]);
|
||||||
|
assert.deepEqual(tasks[1]?.datasetIds, [dataset._id]);
|
||||||
|
assert.deepEqual(tasks[1]?.assetIds, [asset._id]);
|
||||||
|
});
|
||||||
|
|
||||||
test("mongo-backed runtime lists recent runs for a project", async (t) => {
|
test("mongo-backed runtime lists recent runs for a project", async (t) => {
|
||||||
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-runs-"));
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-runs-"));
|
||||||
await mkdir(path.join(sourceDir, "DJI_001"));
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||||
@ -826,6 +1050,191 @@ test("mongo-backed runtime exposes persisted task execution summaries and logs",
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime supports storage connections, datasets, workflow templates, and workflow creation from templates", async (t) => {
|
||||||
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-datasets-"));
|
||||||
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||||
|
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");
|
||||||
|
|
||||||
|
const mongod = await MongoMemoryServer.create({
|
||||||
|
instance: {
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
port: 27125,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-datasets-templates",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
project: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "dataset-user", projectName: "Dataset Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const connections = await readJson<Array<{ _id: string; provider: string; name: string }>>(
|
||||||
|
await fetch(
|
||||||
|
`${server.baseUrl}/api/storage-connections?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const cloudConnection = await readJson<{ _id: string; provider: string; bucket: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/storage-connections`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
name: "Project OSS",
|
||||||
|
provider: "oss",
|
||||||
|
bucket: "emboflow-datasets",
|
||||||
|
endpoint: "oss-cn-hangzhou.aliyuncs.com",
|
||||||
|
basePath: "datasets/project-a",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const asset = await readJson<{ _id: string; displayName: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/assets/register`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
sourcePath: sourceDir,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));
|
||||||
|
|
||||||
|
const dataset = await readJson<{
|
||||||
|
_id: string;
|
||||||
|
latestVersionNumber: number;
|
||||||
|
storageConnectionId: string;
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/datasets`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Delivery Dataset",
|
||||||
|
description: "Dataset derived from the probed delivery asset",
|
||||||
|
sourceAssetIds: [asset._id],
|
||||||
|
storageConnectionId: cloudConnection._id,
|
||||||
|
storagePath: "delivery/dataset-v1",
|
||||||
|
createdBy: "dataset-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const datasets = await readJson<Array<{ _id: string; latestVersionNumber: number }>>(
|
||||||
|
await fetch(`${server.baseUrl}/api/datasets?projectId=${encodeURIComponent(bootstrap.project._id)}`),
|
||||||
|
);
|
||||||
|
const datasetVersions = await readJson<Array<{ datasetId: string; versionNumber: number }>>(
|
||||||
|
await fetch(`${server.baseUrl}/api/datasets/${dataset._id}/versions`),
|
||||||
|
);
|
||||||
|
|
||||||
|
const template = await readJson<{ _id: string; name: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflow-templates`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Delivery Review Template",
|
||||||
|
description: "Template with inspect and export nodes",
|
||||||
|
visualGraph: {
|
||||||
|
viewport: { x: 0, y: 0, zoom: 1 },
|
||||||
|
nodePositions: {
|
||||||
|
"source-asset": { x: 120, y: 120 },
|
||||||
|
"validate-structure": { x: 460, y: 220 },
|
||||||
|
"export-delivery-package": { x: 820, y: 340 },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-asset", type: "source" },
|
||||||
|
{ id: "validate-structure", type: "inspect" },
|
||||||
|
{ id: "export-delivery-package", type: "export" },
|
||||||
|
],
|
||||||
|
edges: [
|
||||||
|
{ from: "source-asset", to: "validate-structure" },
|
||||||
|
{ from: "validate-structure", to: "export-delivery-package" },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
runtimeGraph: {
|
||||||
|
selectedPreset: "delivery-template",
|
||||||
|
nodeConfigs: {
|
||||||
|
"validate-structure": {
|
||||||
|
executorType: "python",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
pluginRefs: ["builtin:delivery-nodes"],
|
||||||
|
createdBy: "dataset-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const templates = await readJson<Array<{ _id: string; name: string }>>(
|
||||||
|
await fetch(
|
||||||
|
`${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(bootstrap.project._id)}`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const workflowFromTemplate = await readJson<{ _id: string; name: string; latestVersionNumber: number }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflow-templates/${template._id}/workflows`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Delivery Review Flow",
|
||||||
|
createdBy: "dataset-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const workflowVersions = await readJson<Array<{ versionNumber: number; runtimeGraph?: { selectedPreset?: string } }>>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflows/${workflowFromTemplate._id}/versions`),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(connections[0]?.provider, "local");
|
||||||
|
assert.equal(cloudConnection.provider, "oss");
|
||||||
|
assert.equal(cloudConnection.bucket, "emboflow-datasets");
|
||||||
|
assert.equal(dataset.storageConnectionId, cloudConnection._id);
|
||||||
|
assert.equal(dataset.latestVersionNumber, 1);
|
||||||
|
assert.equal(datasets.length, 1);
|
||||||
|
assert.equal(datasets[0]?._id, dataset._id);
|
||||||
|
assert.equal(datasetVersions.length, 1);
|
||||||
|
assert.equal(datasetVersions[0]?.datasetId, dataset._id);
|
||||||
|
assert.equal(datasetVersions[0]?.versionNumber, 1);
|
||||||
|
assert.equal(template.name, "Delivery Review Template");
|
||||||
|
assert.equal(templates.some((item) => item._id === template._id), true);
|
||||||
|
assert.equal(workflowFromTemplate.latestVersionNumber, 1);
|
||||||
|
assert.equal(workflowVersions.length, 1);
|
||||||
|
assert.equal(workflowVersions[0]?.versionNumber, 1);
|
||||||
|
assert.equal(workflowVersions[0]?.runtimeGraph?.selectedPreset, "delivery-template");
|
||||||
|
});
|
||||||
|
|
||||||
test("mongo-backed runtime can cancel a run, retry a run snapshot, and retry a failed task", async (t) => {
|
test("mongo-backed runtime can cancel a run, retry a run snapshot, and retry a failed task", async (t) => {
|
||||||
const mongod = await MongoMemoryServer.create({
|
const mongod = await MongoMemoryServer.create({
|
||||||
instance: {
|
instance: {
|
||||||
@ -1081,3 +1490,306 @@ test("mongo-backed runtime can cancel a run, retry a run snapshot, and retry a f
|
|||||||
assert.match(refreshedFailedTask?.logLines?.[0] ?? "", /retry/i);
|
assert.match(refreshedFailedTask?.logLines?.[0] ?? "", /retry/i);
|
||||||
assert.equal(refreshedDownstreamTask?.status, "pending");
|
assert.equal(refreshedDownstreamTask?.status, "pending");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime manages custom docker nodes and exposes them as project node definitions", async (t) => {
|
||||||
|
const mongod = await MongoMemoryServer.create({
|
||||||
|
instance: {
|
||||||
|
ip: "127.0.0.1",
|
||||||
|
port: 27131,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-custom-nodes",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
project: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "custom-node-user", projectName: "Custom Node Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const imageNode = await readJson<{ _id: string; definitionId: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Merge Labels",
|
||||||
|
description: "Combine label reports from upstream nodes",
|
||||||
|
category: "Utility",
|
||||||
|
source: {
|
||||||
|
kind: "image",
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
command: ["python3", "-c", "print('custom image node')"],
|
||||||
|
},
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set_with_report",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
createdBy: "custom-node-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const dockerfileNode = await readJson<{ _id: string; definitionId: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Dockerfile Union",
|
||||||
|
description: "Union assets with a self-contained Dockerfile",
|
||||||
|
category: "Utility",
|
||||||
|
source: {
|
||||||
|
kind: "dockerfile",
|
||||||
|
imageTag: "emboflow-test/dockerfile-union:latest",
|
||||||
|
dockerfileContent: [
|
||||||
|
"FROM python:3.11-alpine",
|
||||||
|
"CMD [\"python3\", \"-c\", \"print('dockerfile custom node')\"]",
|
||||||
|
].join("\n"),
|
||||||
|
},
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
createdBy: "custom-node-user",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const nodeDefinitions = await readJson<
|
||||||
|
Array<{
|
||||||
|
id: string;
|
||||||
|
defaultExecutorType?: string;
|
||||||
|
defaultExecutorConfig?: Record<string, unknown>;
|
||||||
|
allowsMultipleIncoming?: boolean;
|
||||||
|
}>
|
||||||
|
>(
|
||||||
|
await fetch(
|
||||||
|
`${server.baseUrl}/api/node-definitions?projectId=${encodeURIComponent(bootstrap.project._id)}`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const imageDefinition = nodeDefinitions.find((definition) => definition.id === imageNode.definitionId);
|
||||||
|
const dockerfileDefinition = nodeDefinitions.find((definition) => definition.id === dockerfileNode.definitionId);
|
||||||
|
|
||||||
|
assert.equal(imageDefinition?.defaultExecutorType, "docker");
|
||||||
|
assert.equal(imageDefinition?.allowsMultipleIncoming, true);
|
||||||
|
assert.equal(imageDefinition?.defaultExecutorConfig?.image, "python:3.11-alpine");
|
||||||
|
assert.equal(
|
||||||
|
(imageDefinition?.defaultExecutorConfig?.contract as { outputMode?: string } | undefined)?.outputMode,
|
||||||
|
"asset_set_with_report",
|
||||||
|
);
|
||||||
|
assert.equal(dockerfileDefinition?.defaultExecutorType, "docker");
|
||||||
|
assert.equal(
|
||||||
|
typeof dockerfileDefinition?.defaultExecutorConfig?.dockerfileContent,
|
||||||
|
"string",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
(dockerfileDefinition?.defaultExecutorConfig?.contract as { outputMode?: string } | undefined)?.outputMode,
|
||||||
|
"asset_set",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime rejects invalid custom node definitions with a 400 error", async (t) => {
|
||||||
|
const { MongoMemoryServer } = await import("mongodb-memory-server");
|
||||||
|
const mongod = await MongoMemoryServer.create();
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-custom-node-validation",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
project: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "custom-node-validation-user", projectName: "Validation Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const response = await fetch(`${server.baseUrl}/api/custom-nodes`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Broken Source",
|
||||||
|
category: "Source",
|
||||||
|
source: {
|
||||||
|
kind: "dockerfile",
|
||||||
|
dockerfileContent: "CMD [\"python3\"]",
|
||||||
|
},
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "report",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
createdBy: "custom-node-validation-user",
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(response.status, 400);
|
||||||
|
const payload = (await response.json()) as { message: string };
|
||||||
|
assert.equal(payload.message, "source category custom nodes cannot declare multi_asset_set input");
|
||||||
|
});
|
||||||
|
|
||||||
|
test("mongo-backed runtime preflights workflow runs before creation and blocks invalid executor config", async (t) => {
|
||||||
|
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-preflight-"));
|
||||||
|
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||||
|
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
||||||
|
await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");
|
||||||
|
|
||||||
|
const mongod = await MongoMemoryServer.create();
|
||||||
|
t.after(async () => {
|
||||||
|
await mongod.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
const server = await startRuntimeServer({
|
||||||
|
host: "127.0.0.1",
|
||||||
|
port: 0,
|
||||||
|
mongoUri: mongod.getUri(),
|
||||||
|
database: "emboflow-runtime-preflight",
|
||||||
|
corsOrigin: "http://127.0.0.1:3000",
|
||||||
|
});
|
||||||
|
t.after(async () => {
|
||||||
|
await server.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
const bootstrap = await readJson<{
|
||||||
|
workspace: { _id: string };
|
||||||
|
project: { _id: string };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({ userId: "preflight-user", projectName: "Preflight Project" }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const asset = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/assets/register`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
sourcePath: sourceDir,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));
|
||||||
|
|
||||||
|
const workflow = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflows`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: bootstrap.workspace._id,
|
||||||
|
projectId: bootstrap.project._id,
|
||||||
|
name: "Preflight Flow",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const version = await readJson<{ _id: string }>(
|
||||||
|
await fetch(`${server.baseUrl}/api/workflows/${workflow._id}/versions`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-asset", type: "source" },
|
||||||
|
{ id: "validate-structure", type: "inspect" },
|
||||||
|
{ id: "export-delivery-package", type: "export" },
|
||||||
|
],
|
||||||
|
edges: [
|
||||||
|
{ from: "source-asset", to: "validate-structure" },
|
||||||
|
{ from: "validate-structure", to: "export-delivery-package" },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
runtimeGraph: {
|
||||||
|
nodeConfigs: {
|
||||||
|
"validate-structure": {
|
||||||
|
executorType: "http",
|
||||||
|
executorConfig: {
|
||||||
|
method: "POST",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
pluginRefs: ["builtin:delivery-nodes"],
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const preflight = await readJson<{
|
||||||
|
ok: boolean;
|
||||||
|
issues: Array<{ code: string; message: string; nodeId?: string; severity: string }>;
|
||||||
|
summary: { errorCount: number; warningCount: number };
|
||||||
|
}>(
|
||||||
|
await fetch(`${server.baseUrl}/api/runs/preflight`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workflowDefinitionId: workflow._id,
|
||||||
|
workflowVersionId: version._id,
|
||||||
|
assetIds: [asset._id],
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(preflight.ok, false);
|
||||||
|
assert.equal(preflight.summary.errorCount, 1);
|
||||||
|
assert.equal(preflight.issues[0]?.code, "http_executor_missing_url");
|
||||||
|
assert.equal(preflight.issues[0]?.nodeId, "validate-structure");
|
||||||
|
|
||||||
|
const runResponse = await fetch(`${server.baseUrl}/api/runs`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workflowDefinitionId: workflow._id,
|
||||||
|
workflowVersionId: version._id,
|
||||||
|
assetIds: [asset._id],
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(runResponse.status, 400);
|
||||||
|
const runPayload = (await runResponse.json()) as { message: string };
|
||||||
|
assert.equal(runPayload.message, "node validate-structure uses the http executor without a url");
|
||||||
|
});
|
||||||
|
|||||||
@ -13,6 +13,7 @@ test("app shell renders primary navigation", () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
assert.match(html, /Assets/);
|
assert.match(html, /Assets/);
|
||||||
|
assert.match(html, /Nodes/);
|
||||||
assert.match(html, /Workflows/);
|
assert.match(html, /Workflows/);
|
||||||
assert.match(html, /Runs/);
|
assert.match(html, /Runs/);
|
||||||
assert.match(html, /Explore/);
|
assert.match(html, /Explore/);
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { renderWorkspaceSwitcher } from "../workspaces/workspace-switcher.tsx";
|
|||||||
|
|
||||||
export const PRIMARY_NAV_ITEMS = [
|
export const PRIMARY_NAV_ITEMS = [
|
||||||
"Assets",
|
"Assets",
|
||||||
|
"Nodes",
|
||||||
"Workflows",
|
"Workflows",
|
||||||
"Runs",
|
"Runs",
|
||||||
"Explore",
|
"Explore",
|
||||||
|
|||||||
@ -12,7 +12,9 @@ export type RunDetailPageInput = {
|
|||||||
id: string;
|
id: string;
|
||||||
workflowName: string;
|
workflowName: string;
|
||||||
status: string;
|
status: string;
|
||||||
|
inputBindings?: Array<{ kind: "asset" | "dataset"; id: string }>;
|
||||||
assetIds?: string[];
|
assetIds?: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
durationMs?: number;
|
durationMs?: number;
|
||||||
summaryLabel?: string;
|
summaryLabel?: string;
|
||||||
canCancelRun?: boolean;
|
canCancelRun?: boolean;
|
||||||
@ -33,7 +35,9 @@ export function renderRunDetailPage(input: RunDetailPageInput): string {
|
|||||||
<h1>${input.run.workflowName}</h1>
|
<h1>${input.run.workflowName}</h1>
|
||||||
<p>Run ${input.run.id}</p>
|
<p>Run ${input.run.id}</p>
|
||||||
<p>Status: ${input.run.status}</p>
|
<p>Status: ${input.run.status}</p>
|
||||||
|
<p>Input sources: ${(input.run.inputBindings ?? []).map((binding) => `${binding.kind}:${binding.id}`).join(", ") || "none"}</p>
|
||||||
<p>Input assets: ${(input.run.assetIds ?? []).join(", ") || "none"}</p>
|
<p>Input assets: ${(input.run.assetIds ?? []).join(", ") || "none"}</p>
|
||||||
|
<p>Input datasets: ${(input.run.datasetIds ?? []).join(", ") || "none"}</p>
|
||||||
<p>Run duration: ${typeof input.run.durationMs === "number" ? `${input.run.durationMs} ms` : "n/a"}</p>
|
<p>Run duration: ${typeof input.run.durationMs === "number" ? `${input.run.durationMs} ms` : "n/a"}</p>
|
||||||
${input.run.summaryLabel ? `<p>Run summary: ${input.run.summaryLabel}</p>` : ""}
|
${input.run.summaryLabel ? `<p>Run summary: ${input.run.summaryLabel}</p>` : ""}
|
||||||
${input.run.canCancelRun ? `<button type="button">Cancel Run</button>` : ""}
|
${input.run.canCancelRun ? `<button type="button">Cancel Run</button>` : ""}
|
||||||
|
|||||||
@ -7,7 +7,9 @@ export type RunsPageInput = {
|
|||||||
id: string;
|
id: string;
|
||||||
workflowName: string;
|
workflowName: string;
|
||||||
status: string;
|
status: string;
|
||||||
|
inputBindings?: Array<{ kind: "asset" | "dataset"; id: string }>;
|
||||||
assetIds: string[];
|
assetIds: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
}>;
|
}>;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -21,7 +23,9 @@ export function renderRunsPage(input: RunsPageInput): string {
|
|||||||
<article data-run-id="${run.id}">
|
<article data-run-id="${run.id}">
|
||||||
<a href="/runs/${run.id}"><strong>${run.workflowName}</strong></a>
|
<a href="/runs/${run.id}"><strong>${run.workflowName}</strong></a>
|
||||||
<p>Status: ${run.status}</p>
|
<p>Status: ${run.status}</p>
|
||||||
|
<p>Input sources: ${(run.inputBindings ?? []).map((binding) => `${binding.kind}:${binding.id}`).join(", ") || "none"}</p>
|
||||||
<p>Input assets: ${run.assetIds.join(", ") || "none"}</p>
|
<p>Input assets: ${run.assetIds.join(", ") || "none"}</p>
|
||||||
|
<p>Input datasets: ${(run.datasetIds ?? []).join(", ") || "none"}</p>
|
||||||
</article>
|
</article>
|
||||||
`,
|
`,
|
||||||
)
|
)
|
||||||
|
|||||||
@ -15,7 +15,7 @@ export const WORKFLOW_NODE_DEFINITIONS: WorkflowNodeDefinition[] = [
|
|||||||
name: "Source Asset",
|
name: "Source Asset",
|
||||||
category: "Source",
|
category: "Source",
|
||||||
description: "Load an uploaded asset or registered storage path.",
|
description: "Load an uploaded asset or registered storage path.",
|
||||||
executorType: "python",
|
executorType: "docker",
|
||||||
inputSchemaSummary: "assetRef",
|
inputSchemaSummary: "assetRef",
|
||||||
outputSchemaSummary: "assetRef",
|
outputSchemaSummary: "assetRef",
|
||||||
},
|
},
|
||||||
@ -34,7 +34,7 @@ export const WORKFLOW_NODE_DEFINITIONS: WorkflowNodeDefinition[] = [
|
|||||||
name: "Rename Delivery Folder",
|
name: "Rename Delivery Folder",
|
||||||
category: "Transform",
|
category: "Transform",
|
||||||
description: "Rename the top-level delivery folder to the business naming convention.",
|
description: "Rename the top-level delivery folder to the business naming convention.",
|
||||||
executorType: "python",
|
executorType: "docker",
|
||||||
inputSchemaSummary: "artifactRef",
|
inputSchemaSummary: "artifactRef",
|
||||||
outputSchemaSummary: "artifactRef",
|
outputSchemaSummary: "artifactRef",
|
||||||
supportsCodeHook: true,
|
supportsCodeHook: true,
|
||||||
@ -44,17 +44,47 @@ export const WORKFLOW_NODE_DEFINITIONS: WorkflowNodeDefinition[] = [
|
|||||||
name: "Validate Structure",
|
name: "Validate Structure",
|
||||||
category: "Inspect",
|
category: "Inspect",
|
||||||
description: "Validate required directories and metadata files.",
|
description: "Validate required directories and metadata files.",
|
||||||
executorType: "python",
|
executorType: "docker",
|
||||||
inputSchemaSummary: "artifactRef",
|
inputSchemaSummary: "artifactRef",
|
||||||
outputSchemaSummary: "artifactRef + report",
|
outputSchemaSummary: "artifactRef + report",
|
||||||
supportsCodeHook: true,
|
supportsCodeHook: true,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
id: "union-assets",
|
||||||
|
name: "Union Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Merge multiple upstream asset sets into one deduplicated asset set.",
|
||||||
|
executorType: "docker",
|
||||||
|
inputSchemaSummary: "assetSet + assetSet",
|
||||||
|
outputSchemaSummary: "assetSet",
|
||||||
|
supportsCodeHook: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "intersect-assets",
|
||||||
|
name: "Intersect Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Keep only the assets shared by every upstream asset set.",
|
||||||
|
executorType: "docker",
|
||||||
|
inputSchemaSummary: "assetSet + assetSet",
|
||||||
|
outputSchemaSummary: "assetSet",
|
||||||
|
supportsCodeHook: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: "difference-assets",
|
||||||
|
name: "Difference Assets",
|
||||||
|
category: "Utility",
|
||||||
|
description: "Subtract downstream asset sets from the first upstream asset set.",
|
||||||
|
executorType: "docker",
|
||||||
|
inputSchemaSummary: "assetSet + assetSet",
|
||||||
|
outputSchemaSummary: "assetSet",
|
||||||
|
supportsCodeHook: true,
|
||||||
|
},
|
||||||
{
|
{
|
||||||
id: "export-delivery-package",
|
id: "export-delivery-package",
|
||||||
name: "Export Delivery Package",
|
name: "Export Delivery Package",
|
||||||
category: "Export",
|
category: "Export",
|
||||||
description: "Produce the final delivery package artifact for upload.",
|
description: "Produce the final delivery package artifact for upload.",
|
||||||
executorType: "http",
|
executorType: "docker",
|
||||||
inputSchemaSummary: "artifactRef",
|
inputSchemaSummary: "artifactRef",
|
||||||
outputSchemaSummary: "artifactRef",
|
outputSchemaSummary: "artifactRef",
|
||||||
},
|
},
|
||||||
|
|||||||
@ -39,7 +39,9 @@ test("run detail view shows node status badges from run data", () => {
|
|||||||
id: "run-1",
|
id: "run-1",
|
||||||
workflowName: "Delivery Normalize",
|
workflowName: "Delivery Normalize",
|
||||||
status: "running",
|
status: "running",
|
||||||
|
inputBindings: [{ kind: "dataset", id: "dataset-1" }],
|
||||||
assetIds: ["asset-1"],
|
assetIds: ["asset-1"],
|
||||||
|
datasetIds: ["dataset-1"],
|
||||||
durationMs: 2450,
|
durationMs: 2450,
|
||||||
summaryLabel: "2 tasks complete, 1 running, 1 stdout line",
|
summaryLabel: "2 tasks complete, 1 running, 1 stdout line",
|
||||||
canCancelRun: true,
|
canCancelRun: true,
|
||||||
@ -85,7 +87,9 @@ test("run detail view shows node status badges from run data", () => {
|
|||||||
assert.match(html, /Validate Structure/);
|
assert.match(html, /Validate Structure/);
|
||||||
assert.match(html, /running/);
|
assert.match(html, /running/);
|
||||||
assert.match(html, /Checking metadata/);
|
assert.match(html, /Checking metadata/);
|
||||||
|
assert.match(html, /Input sources: dataset:dataset-1/);
|
||||||
assert.match(html, /Input assets: asset-1/);
|
assert.match(html, /Input assets: asset-1/);
|
||||||
|
assert.match(html, /Input datasets: dataset-1/);
|
||||||
assert.match(html, /Run duration: 2450 ms/);
|
assert.match(html, /Run duration: 2450 ms/);
|
||||||
assert.match(html, /2 tasks complete, 1 running, 1 stdout line/);
|
assert.match(html, /2 tasks complete, 1 running, 1 stdout line/);
|
||||||
assert.match(html, /Cancel Run/);
|
assert.match(html, /Cancel Run/);
|
||||||
@ -109,13 +113,17 @@ test("runs page renders project-scoped run history with workflow links", () => {
|
|||||||
id: "run-1",
|
id: "run-1",
|
||||||
workflowName: "Delivery Normalize",
|
workflowName: "Delivery Normalize",
|
||||||
status: "success",
|
status: "success",
|
||||||
|
inputBindings: [{ kind: "asset", id: "asset-1" }],
|
||||||
assetIds: ["asset-1"],
|
assetIds: ["asset-1"],
|
||||||
|
datasetIds: [],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: "run-2",
|
id: "run-2",
|
||||||
workflowName: "Archive Extract",
|
workflowName: "Archive Extract",
|
||||||
status: "running",
|
status: "running",
|
||||||
|
inputBindings: [{ kind: "dataset", id: "dataset-3" }],
|
||||||
assetIds: ["asset-2", "asset-3"],
|
assetIds: ["asset-2", "asset-3"],
|
||||||
|
datasetIds: ["dataset-3"],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
@ -123,6 +131,8 @@ test("runs page renders project-scoped run history with workflow links", () => {
|
|||||||
assert.match(html, /Recent workflow executions/);
|
assert.match(html, /Recent workflow executions/);
|
||||||
assert.match(html, /Delivery Normalize/);
|
assert.match(html, /Delivery Normalize/);
|
||||||
assert.match(html, /Archive Extract/);
|
assert.match(html, /Archive Extract/);
|
||||||
|
assert.match(html, /Input sources: dataset:dataset-3/);
|
||||||
assert.match(html, /Input assets: asset-2, asset-3/);
|
assert.match(html, /Input assets: asset-2, asset-3/);
|
||||||
|
assert.match(html, /Input datasets: dataset-3/);
|
||||||
assert.match(html, /\/runs\/run-2/);
|
assert.match(html, /\/runs\/run-2/);
|
||||||
});
|
});
|
||||||
|
|||||||
@ -1,3 +1,5 @@
|
|||||||
|
import type { WorkflowInputBinding } from "../../../../packages/contracts/src/workflow-input.ts";
|
||||||
|
|
||||||
export type BootstrapContext = {
|
export type BootstrapContext = {
|
||||||
userId: string;
|
userId: string;
|
||||||
workspace: { _id: string; name: string };
|
workspace: { _id: string; name: string };
|
||||||
@ -24,12 +26,61 @@ export class ApiClient {
|
|||||||
return readJson<BootstrapContext>(response);
|
return readJson<BootstrapContext>(response);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async listProjects(workspaceId: string) {
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/projects?workspaceId=${encodeURIComponent(workspaceId)}`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createProject(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/projects`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
async listAssets(projectId: string) {
|
async listAssets(projectId: string) {
|
||||||
return readJson<any[]>(
|
return readJson<any[]>(
|
||||||
await fetch(`${this.baseUrl}/api/assets?projectId=${encodeURIComponent(projectId)}`),
|
await fetch(`${this.baseUrl}/api/assets?projectId=${encodeURIComponent(projectId)}`),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async listStorageConnections(workspaceId: string) {
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(
|
||||||
|
`${this.baseUrl}/api/storage-connections?workspaceId=${encodeURIComponent(workspaceId)}`,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createStorageConnection(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
name: string;
|
||||||
|
provider: "local" | "minio" | "s3" | "bos" | "oss";
|
||||||
|
bucket?: string;
|
||||||
|
endpoint?: string;
|
||||||
|
region?: string;
|
||||||
|
basePath?: string;
|
||||||
|
rootPath?: string;
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/storage-connections`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
async registerLocalAsset(input: {
|
async registerLocalAsset(input: {
|
||||||
workspaceId: string;
|
workspaceId: string;
|
||||||
projectId: string;
|
projectId: string;
|
||||||
@ -61,6 +112,41 @@ export class ApiClient {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async listDatasets(projectId: string) {
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/datasets?projectId=${encodeURIComponent(projectId)}`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createDataset(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
projectId: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
sourceAssetIds: string[];
|
||||||
|
storageConnectionId: string;
|
||||||
|
storagePath: string;
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/datasets`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDataset(datasetId: string) {
|
||||||
|
return readJson<any>(await fetch(`${this.baseUrl}/api/datasets/${datasetId}`));
|
||||||
|
}
|
||||||
|
|
||||||
|
async listDatasetVersions(datasetId: string) {
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/datasets/${datasetId}/versions`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
async listWorkflows(projectId: string) {
|
async listWorkflows(projectId: string) {
|
||||||
return readJson<any[]>(
|
return readJson<any[]>(
|
||||||
await fetch(`${this.baseUrl}/api/workflows?projectId=${encodeURIComponent(projectId)}`),
|
await fetch(`${this.baseUrl}/api/workflows?projectId=${encodeURIComponent(projectId)}`),
|
||||||
@ -103,14 +189,105 @@ export class ApiClient {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
async listNodeDefinitions() {
|
async listNodeDefinitions(projectId?: string) {
|
||||||
return readJson<any[]>(await fetch(`${this.baseUrl}/api/node-definitions`));
|
const search = new URLSearchParams();
|
||||||
|
if (projectId) {
|
||||||
|
search.set("projectId", projectId);
|
||||||
|
}
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/node-definitions${search.toString() ? `?${search.toString()}` : ""}`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listCustomNodes(projectId: string) {
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/custom-nodes?projectId=${encodeURIComponent(projectId)}`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createCustomNode(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
projectId: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
category?: "Source" | "Transform" | "Inspect" | "Annotate" | "Export" | "Utility";
|
||||||
|
source: Record<string, unknown>;
|
||||||
|
contract: {
|
||||||
|
inputMode: "single_asset_set" | "multi_asset_set";
|
||||||
|
outputMode: "report" | "asset_set" | "asset_set_with_report";
|
||||||
|
artifactType: "json" | "directory" | "video";
|
||||||
|
};
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/custom-nodes`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async listWorkflowTemplates(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
projectId?: string;
|
||||||
|
}) {
|
||||||
|
const search = new URLSearchParams({ workspaceId: input.workspaceId });
|
||||||
|
if (input.projectId) {
|
||||||
|
search.set("projectId", input.projectId);
|
||||||
|
}
|
||||||
|
return readJson<any[]>(
|
||||||
|
await fetch(`${this.baseUrl}/api/workflow-templates?${search.toString()}`),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createWorkflowTemplate(input: {
|
||||||
|
workspaceId: string;
|
||||||
|
projectId?: string;
|
||||||
|
name: string;
|
||||||
|
description?: string;
|
||||||
|
visualGraph: Record<string, unknown>;
|
||||||
|
logicGraph: Record<string, unknown>;
|
||||||
|
runtimeGraph: Record<string, unknown>;
|
||||||
|
pluginRefs: string[];
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/workflow-templates`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
async createWorkflowFromTemplate(input: {
|
||||||
|
templateId: string;
|
||||||
|
workspaceId: string;
|
||||||
|
projectId: string;
|
||||||
|
name: string;
|
||||||
|
createdBy?: string;
|
||||||
|
}) {
|
||||||
|
return readJson<any>(
|
||||||
|
await fetch(`${this.baseUrl}/api/workflow-templates/${input.templateId}/workflows`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workspaceId: input.workspaceId,
|
||||||
|
projectId: input.projectId,
|
||||||
|
name: input.name,
|
||||||
|
createdBy: input.createdBy,
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
async createRun(input: {
|
async createRun(input: {
|
||||||
workflowDefinitionId: string;
|
workflowDefinitionId: string;
|
||||||
workflowVersionId: string;
|
workflowVersionId: string;
|
||||||
assetIds: string[];
|
inputBindings?: WorkflowInputBinding[];
|
||||||
|
assetIds?: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
}) {
|
}) {
|
||||||
return readJson<any>(
|
return readJson<any>(
|
||||||
await fetch(`${this.baseUrl}/api/runs`, {
|
await fetch(`${this.baseUrl}/api/runs`, {
|
||||||
@ -121,6 +298,35 @@ export class ApiClient {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async preflightRun(input: {
|
||||||
|
workflowDefinitionId: string;
|
||||||
|
workflowVersionId: string;
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
|
assetIds?: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
|
}) {
|
||||||
|
return readJson<{
|
||||||
|
ok: boolean;
|
||||||
|
issues: Array<{
|
||||||
|
severity: "error" | "warning";
|
||||||
|
code: string;
|
||||||
|
message: string;
|
||||||
|
nodeId?: string;
|
||||||
|
nodeDefinitionId?: string;
|
||||||
|
}>;
|
||||||
|
summary: {
|
||||||
|
errorCount: number;
|
||||||
|
warningCount: number;
|
||||||
|
};
|
||||||
|
}>(
|
||||||
|
await fetch(`${this.baseUrl}/api/runs/preflight`, {
|
||||||
|
method: "POST",
|
||||||
|
headers: { "content-type": "application/json" },
|
||||||
|
body: JSON.stringify(input),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
async getRun(runId: string) {
|
async getRun(runId: string) {
|
||||||
return readJson<any>(await fetch(`${this.baseUrl}/api/runs/${runId}`));
|
return readJson<any>(await fetch(`${this.baseUrl}/api/runs/${runId}`));
|
||||||
}
|
}
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
27
apps/web/src/runtime/custom-node-presenter.test.ts
Normal file
27
apps/web/src/runtime/custom-node-presenter.test.ts
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
import test from "node:test";
|
||||||
|
import assert from "node:assert/strict";
|
||||||
|
|
||||||
|
import {
|
||||||
|
formatCustomNodeInputModeKey,
|
||||||
|
formatCustomNodeOutputModeKey,
|
||||||
|
formatCustomNodeSourceKindKey,
|
||||||
|
} from "./custom-node-presenter.ts";
|
||||||
|
|
||||||
|
test("map custom node input mode values to localized translation keys", () => {
|
||||||
|
assert.equal(formatCustomNodeInputModeKey("multi_asset_set"), "customNodeMultiAssetSet");
|
||||||
|
assert.equal(formatCustomNodeInputModeKey("single_asset_set"), "customNodeSingleAssetSet");
|
||||||
|
assert.equal(formatCustomNodeInputModeKey("unexpected"), "customNodeSingleAssetSet");
|
||||||
|
});
|
||||||
|
|
||||||
|
test("map custom node output mode values to localized translation keys", () => {
|
||||||
|
assert.equal(formatCustomNodeOutputModeKey("asset_set"), "customNodeAssetSet");
|
||||||
|
assert.equal(formatCustomNodeOutputModeKey("asset_set_with_report"), "customNodeAssetSetWithReport");
|
||||||
|
assert.equal(formatCustomNodeOutputModeKey("report"), "customNodeReport");
|
||||||
|
assert.equal(formatCustomNodeOutputModeKey("unexpected"), "customNodeReport");
|
||||||
|
});
|
||||||
|
|
||||||
|
test("map custom node source kind values to localized translation keys", () => {
|
||||||
|
assert.equal(formatCustomNodeSourceKindKey("image"), "customNodeSourceImage");
|
||||||
|
assert.equal(formatCustomNodeSourceKindKey("dockerfile"), "customNodeSourceDockerfile");
|
||||||
|
assert.equal(formatCustomNodeSourceKindKey("other"), "none");
|
||||||
|
});
|
||||||
26
apps/web/src/runtime/custom-node-presenter.ts
Normal file
26
apps/web/src/runtime/custom-node-presenter.ts
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
import type { TranslationKey } from "./i18n.tsx";
|
||||||
|
|
||||||
|
export function formatCustomNodeInputModeKey(value: unknown): TranslationKey {
|
||||||
|
return value === "multi_asset_set" ? "customNodeMultiAssetSet" : "customNodeSingleAssetSet";
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatCustomNodeOutputModeKey(value: unknown): TranslationKey {
|
||||||
|
switch (value) {
|
||||||
|
case "asset_set":
|
||||||
|
return "customNodeAssetSet";
|
||||||
|
case "asset_set_with_report":
|
||||||
|
return "customNodeAssetSetWithReport";
|
||||||
|
default:
|
||||||
|
return "customNodeReport";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatCustomNodeSourceKindKey(value: unknown): TranslationKey {
|
||||||
|
if (value === "dockerfile") {
|
||||||
|
return "customNodeSourceDockerfile";
|
||||||
|
}
|
||||||
|
if (value === "image") {
|
||||||
|
return "customNodeSourceImage";
|
||||||
|
}
|
||||||
|
return "none";
|
||||||
|
}
|
||||||
@ -4,8 +4,26 @@ import assert from "node:assert/strict";
|
|||||||
import { localizeNodeDefinition, translate } from "./i18n.tsx";
|
import { localizeNodeDefinition, translate } from "./i18n.tsx";
|
||||||
|
|
||||||
test("translate returns chinese and english labels for shared frontend keys", () => {
|
test("translate returns chinese and english labels for shared frontend keys", () => {
|
||||||
|
assert.equal(translate("en", "navProjects"), "Projects");
|
||||||
|
assert.equal(translate("zh", "navProjects"), "项目");
|
||||||
|
assert.equal(translate("en", "templateSaved"), "Saved template");
|
||||||
|
assert.equal(translate("zh", "templateSaved"), "已保存模板");
|
||||||
assert.equal(translate("en", "navWorkflows"), "Workflows");
|
assert.equal(translate("en", "navWorkflows"), "Workflows");
|
||||||
assert.equal(translate("zh", "navWorkflows"), "工作流");
|
assert.equal(translate("zh", "navWorkflows"), "工作流");
|
||||||
|
assert.equal(translate("en", "navNodes"), "Nodes");
|
||||||
|
assert.equal(translate("zh", "navNodes"), "节点");
|
||||||
|
assert.equal(translate("en", "runInputType"), "Run Input Type");
|
||||||
|
assert.equal(translate("zh", "runInputType"), "运行输入类型");
|
||||||
|
assert.equal(translate("en", "datasetInputKind"), "Dataset");
|
||||||
|
assert.equal(translate("zh", "datasetInputKind"), "数据集");
|
||||||
|
assert.equal(
|
||||||
|
translate("en", "invalidConnectionCycle"),
|
||||||
|
"This edge would create a cycle.",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
translate("zh", "dragNodeToCanvas"),
|
||||||
|
"将节点拖放到这里即可在画布中创建。",
|
||||||
|
);
|
||||||
assert.equal(
|
assert.equal(
|
||||||
translate("en", "workflowCreatedName", { count: 3 }),
|
translate("en", "workflowCreatedName", { count: 3 }),
|
||||||
"Delivery Normalize 3",
|
"Delivery Normalize 3",
|
||||||
|
|||||||
@ -2,12 +2,14 @@ import React, { createContext, useContext, useEffect, useMemo, useState } from "
|
|||||||
|
|
||||||
export type Language = "en" | "zh";
|
export type Language = "en" | "zh";
|
||||||
|
|
||||||
type TranslationKey =
|
export type TranslationKey =
|
||||||
| "workspace"
|
| "workspace"
|
||||||
| "project"
|
| "project"
|
||||||
| "runs"
|
| "runs"
|
||||||
| "localDev"
|
| "localDev"
|
||||||
|
| "navProjects"
|
||||||
| "navAssets"
|
| "navAssets"
|
||||||
|
| "navNodes"
|
||||||
| "navWorkflows"
|
| "navWorkflows"
|
||||||
| "navRuns"
|
| "navRuns"
|
||||||
| "navExplore"
|
| "navExplore"
|
||||||
@ -18,6 +20,69 @@ type TranslationKey =
|
|||||||
| "chinese"
|
| "chinese"
|
||||||
| "assetsTitle"
|
| "assetsTitle"
|
||||||
| "assetsDescription"
|
| "assetsDescription"
|
||||||
|
| "projectsTitle"
|
||||||
|
| "projectsDescription"
|
||||||
|
| "projectNameLabel"
|
||||||
|
| "projectDescriptionLabel"
|
||||||
|
| "createProject"
|
||||||
|
| "noProjectsYet"
|
||||||
|
| "activeProject"
|
||||||
|
| "openProject"
|
||||||
|
| "storageConnectionsTitle"
|
||||||
|
| "storageConnectionsDescription"
|
||||||
|
| "createStorageConnection"
|
||||||
|
| "storageProvider"
|
||||||
|
| "bucket"
|
||||||
|
| "endpoint"
|
||||||
|
| "region"
|
||||||
|
| "basePath"
|
||||||
|
| "rootPath"
|
||||||
|
| "noStorageConnectionsYet"
|
||||||
|
| "nodesTitle"
|
||||||
|
| "nodesDescription"
|
||||||
|
| "createCustomNode"
|
||||||
|
| "noCustomNodesYet"
|
||||||
|
| "customNodeName"
|
||||||
|
| "customNodeDescription"
|
||||||
|
| "customNodeCategory"
|
||||||
|
| "customNodeSourceKind"
|
||||||
|
| "customNodeSourceImage"
|
||||||
|
| "customNodeSourceDockerfile"
|
||||||
|
| "customNodeImage"
|
||||||
|
| "customNodeDockerfile"
|
||||||
|
| "customNodeDockerfileUpload"
|
||||||
|
| "customNodeCommand"
|
||||||
|
| "customNodeInputMode"
|
||||||
|
| "customNodeOutputMode"
|
||||||
|
| "customNodeArtifactType"
|
||||||
|
| "customNodeSingleAssetSet"
|
||||||
|
| "customNodeMultiAssetSet"
|
||||||
|
| "customNodeReport"
|
||||||
|
| "customNodeAssetSet"
|
||||||
|
| "customNodeAssetSetWithReport"
|
||||||
|
| "customNodeValidationNameRequired"
|
||||||
|
| "customNodeValidationNameTooLong"
|
||||||
|
| "customNodeValidationInvalidCategory"
|
||||||
|
| "customNodeValidationInvalidSourceKind"
|
||||||
|
| "customNodeValidationImageRequired"
|
||||||
|
| "customNodeValidationDockerfileRequired"
|
||||||
|
| "customNodeValidationDockerfileMissingFrom"
|
||||||
|
| "customNodeValidationInvalidCommand"
|
||||||
|
| "customNodeValidationInvalidInputMode"
|
||||||
|
| "customNodeValidationInvalidOutputMode"
|
||||||
|
| "customNodeValidationInvalidArtifactType"
|
||||||
|
| "customNodeValidationSourceCannotBeMultiInput"
|
||||||
|
| "datasetsTitle"
|
||||||
|
| "datasetsDescription"
|
||||||
|
| "datasetName"
|
||||||
|
| "datasetDescription"
|
||||||
|
| "sourceAsset"
|
||||||
|
| "sourceAssets"
|
||||||
|
| "storageConnection"
|
||||||
|
| "storagePathLabel"
|
||||||
|
| "createDataset"
|
||||||
|
| "noDatasetsYet"
|
||||||
|
| "latestDatasetVersion"
|
||||||
| "localPath"
|
| "localPath"
|
||||||
| "registerLocalPath"
|
| "registerLocalPath"
|
||||||
| "noAssetsYet"
|
| "noAssetsYet"
|
||||||
@ -40,24 +105,47 @@ type TranslationKey =
|
|||||||
| "recommendedNodes"
|
| "recommendedNodes"
|
||||||
| "noProbeReportYet"
|
| "noProbeReportYet"
|
||||||
| "workflowsTitle"
|
| "workflowsTitle"
|
||||||
|
| "workflowTemplatesTitle"
|
||||||
|
| "workflowTemplatesDescription"
|
||||||
| "createWorkflow"
|
| "createWorkflow"
|
||||||
|
| "createBlankWorkflow"
|
||||||
|
| "createWorkflowFromTemplate"
|
||||||
|
| "saveAsTemplate"
|
||||||
|
| "templateName"
|
||||||
|
| "templateDescription"
|
||||||
|
| "templateSaved"
|
||||||
|
| "noWorkflowTemplatesYet"
|
||||||
| "noWorkflowsYet"
|
| "noWorkflowsYet"
|
||||||
| "latestVersion"
|
| "latestVersion"
|
||||||
| "workflowEditor"
|
| "workflowEditor"
|
||||||
| "runAsset"
|
| "workflowChecks"
|
||||||
|
| "runChecks"
|
||||||
|
| "checksPassed"
|
||||||
|
| "checksBlocked"
|
||||||
|
| "checkErrors"
|
||||||
|
| "checkWarnings"
|
||||||
|
| "noChecksRunYet"
|
||||||
|
| "runInputType"
|
||||||
|
| "runInput"
|
||||||
|
| "assetInputKind"
|
||||||
|
| "datasetInputKind"
|
||||||
| "saveWorkflowVersion"
|
| "saveWorkflowVersion"
|
||||||
| "triggerWorkflowRun"
|
| "triggerWorkflowRun"
|
||||||
| "reloadLatestSaved"
|
| "reloadLatestSaved"
|
||||||
| "openLatestRun"
|
| "openLatestRun"
|
||||||
| "selectAssetBeforeRun"
|
| "selectInputBeforeRun"
|
||||||
| "nodeLibrary"
|
| "nodeLibrary"
|
||||||
|
| "nodeLibraryHint"
|
||||||
| "canvas"
|
| "canvas"
|
||||||
| "canvasHint"
|
| "canvasHint"
|
||||||
|
| "dragNodeToCanvas"
|
||||||
| "latestSavedVersions"
|
| "latestSavedVersions"
|
||||||
| "draftStatus"
|
| "draftStatus"
|
||||||
| "draftSynced"
|
| "draftSynced"
|
||||||
| "draftUnsaved"
|
| "draftUnsaved"
|
||||||
| "nodeConfiguration"
|
| "nodeConfiguration"
|
||||||
|
| "inputEnvelope"
|
||||||
|
| "outputEnvelope"
|
||||||
| "category"
|
| "category"
|
||||||
| "definition"
|
| "definition"
|
||||||
| "executorType"
|
| "executorType"
|
||||||
@ -69,11 +157,14 @@ type TranslationKey =
|
|||||||
| "removeNode"
|
| "removeNode"
|
||||||
| "workflowCreatedName"
|
| "workflowCreatedName"
|
||||||
| "noAssetsAvailable"
|
| "noAssetsAvailable"
|
||||||
|
| "noDatasetsAvailable"
|
||||||
| "runsTitle"
|
| "runsTitle"
|
||||||
| "runsDescription"
|
| "runsDescription"
|
||||||
| "noRunsYet"
|
| "noRunsYet"
|
||||||
| "createdAt"
|
| "createdAt"
|
||||||
|
| "inputSources"
|
||||||
| "inputAssets"
|
| "inputAssets"
|
||||||
|
| "inputDatasets"
|
||||||
| "runDetail"
|
| "runDetail"
|
||||||
| "workflow"
|
| "workflow"
|
||||||
| "startedAt"
|
| "startedAt"
|
||||||
@ -84,6 +175,7 @@ type TranslationKey =
|
|||||||
| "retryRun"
|
| "retryRun"
|
||||||
| "runGraph"
|
| "runGraph"
|
||||||
| "boundAssets"
|
| "boundAssets"
|
||||||
|
| "boundDatasets"
|
||||||
| "selectedTask"
|
| "selectedTask"
|
||||||
| "executor"
|
| "executor"
|
||||||
| "executorConfig"
|
| "executorConfig"
|
||||||
@ -108,8 +200,17 @@ type TranslationKey =
|
|||||||
| "loadingArtifact"
|
| "loadingArtifact"
|
||||||
| "bootstrappingLocalWorkspace"
|
| "bootstrappingLocalWorkspace"
|
||||||
| "failedLoadAssets"
|
| "failedLoadAssets"
|
||||||
|
| "failedLoadStorageConnections"
|
||||||
|
| "failedCreateStorageConnection"
|
||||||
|
| "failedLoadDatasets"
|
||||||
|
| "failedCreateDataset"
|
||||||
|
| "failedLoadCustomNodes"
|
||||||
|
| "failedCreateCustomNode"
|
||||||
| "failedRegisterAsset"
|
| "failedRegisterAsset"
|
||||||
| "failedLoadWorkflows"
|
| "failedLoadWorkflows"
|
||||||
|
| "failedLoadTemplates"
|
||||||
|
| "failedCreateTemplate"
|
||||||
|
| "failedCreateWorkflowFromTemplate"
|
||||||
| "failedLoadWorkflow"
|
| "failedLoadWorkflow"
|
||||||
| "failedLoadRuns"
|
| "failedLoadRuns"
|
||||||
| "failedLoadRunDetail"
|
| "failedLoadRunDetail"
|
||||||
@ -119,6 +220,8 @@ type TranslationKey =
|
|||||||
| "failedRetryTask"
|
| "failedRetryTask"
|
||||||
| "failedLoadArtifact"
|
| "failedLoadArtifact"
|
||||||
| "failedBootstrap"
|
| "failedBootstrap"
|
||||||
|
| "failedLoadProjects"
|
||||||
|
| "failedCreateProject"
|
||||||
| "validatedAssetCount"
|
| "validatedAssetCount"
|
||||||
| "loadedAssetCount"
|
| "loadedAssetCount"
|
||||||
| "success"
|
| "success"
|
||||||
@ -136,7 +239,14 @@ type TranslationKey =
|
|||||||
| "artifactsCount"
|
| "artifactsCount"
|
||||||
| "viaExecutor"
|
| "viaExecutor"
|
||||||
| "assetCount"
|
| "assetCount"
|
||||||
| "artifactCount";
|
| "artifactCount"
|
||||||
|
| "invalidConnectionMissingEndpoint"
|
||||||
|
| "invalidConnectionSelf"
|
||||||
|
| "invalidConnectionDuplicate"
|
||||||
|
| "invalidConnectionSourceDisallowsOutgoing"
|
||||||
|
| "invalidConnectionTargetDisallowsIncoming"
|
||||||
|
| "invalidConnectionTargetAlreadyHasIncoming"
|
||||||
|
| "invalidConnectionCycle";
|
||||||
|
|
||||||
const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||||
en: {
|
en: {
|
||||||
@ -144,7 +254,9 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
project: "Project",
|
project: "Project",
|
||||||
runs: "Runs",
|
runs: "Runs",
|
||||||
localDev: "Local Dev",
|
localDev: "Local Dev",
|
||||||
|
navProjects: "Projects",
|
||||||
navAssets: "Assets",
|
navAssets: "Assets",
|
||||||
|
navNodes: "Nodes",
|
||||||
navWorkflows: "Workflows",
|
navWorkflows: "Workflows",
|
||||||
navRuns: "Runs",
|
navRuns: "Runs",
|
||||||
navExplore: "Explore",
|
navExplore: "Explore",
|
||||||
@ -156,6 +268,74 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
assetsTitle: "Assets",
|
assetsTitle: "Assets",
|
||||||
assetsDescription:
|
assetsDescription:
|
||||||
"Register local folders, archives, or dataset files, then probe them into managed asset metadata.",
|
"Register local folders, archives, or dataset files, then probe them into managed asset metadata.",
|
||||||
|
projectsTitle: "Projects",
|
||||||
|
projectsDescription:
|
||||||
|
"Create project spaces, switch the active project, and manage project-scoped assets, datasets, workflows, and runs.",
|
||||||
|
projectNameLabel: "Project Name",
|
||||||
|
projectDescriptionLabel: "Project Description",
|
||||||
|
createProject: "Create Project",
|
||||||
|
noProjectsYet: "No projects yet.",
|
||||||
|
activeProject: "Active project",
|
||||||
|
openProject: "Open Project",
|
||||||
|
storageConnectionsTitle: "Storage Connections",
|
||||||
|
storageConnectionsDescription:
|
||||||
|
"Define where project datasets are stored, including local paths and object storage providers.",
|
||||||
|
nodesTitle: "Custom Nodes",
|
||||||
|
nodesDescription:
|
||||||
|
"Register project-level docker nodes from an image or a self-contained Dockerfile. Containers must read the EmboFlow input path and write a result object to the EmboFlow output path.",
|
||||||
|
createCustomNode: "Create Custom Node",
|
||||||
|
noCustomNodesYet: "No custom nodes have been created yet.",
|
||||||
|
customNodeName: "Node Name",
|
||||||
|
customNodeDescription: "Node Description",
|
||||||
|
customNodeCategory: "Node Category",
|
||||||
|
customNodeSourceKind: "Container Source",
|
||||||
|
customNodeSourceImage: "Docker Image",
|
||||||
|
customNodeSourceDockerfile: "Dockerfile",
|
||||||
|
customNodeImage: "Image",
|
||||||
|
customNodeDockerfile: "Dockerfile Content",
|
||||||
|
customNodeDockerfileUpload: "Upload Dockerfile",
|
||||||
|
customNodeCommand: "Command (one argument per line)",
|
||||||
|
customNodeInputMode: "Input Contract",
|
||||||
|
customNodeOutputMode: "Output Contract",
|
||||||
|
customNodeArtifactType: "Artifact Type",
|
||||||
|
customNodeSingleAssetSet: "Single asset set",
|
||||||
|
customNodeMultiAssetSet: "Multiple asset sets",
|
||||||
|
customNodeReport: "Report only",
|
||||||
|
customNodeAssetSet: "Asset set",
|
||||||
|
customNodeAssetSetWithReport: "Asset set with report",
|
||||||
|
customNodeValidationNameRequired: "Node name is required.",
|
||||||
|
customNodeValidationNameTooLong: "Node name must be 80 characters or fewer.",
|
||||||
|
customNodeValidationInvalidCategory: "Node category is invalid.",
|
||||||
|
customNodeValidationInvalidSourceKind: "Container source must be Docker image or Dockerfile.",
|
||||||
|
customNodeValidationImageRequired: "Docker image is required.",
|
||||||
|
customNodeValidationDockerfileRequired: "Dockerfile content is required.",
|
||||||
|
customNodeValidationDockerfileMissingFrom: "Dockerfile must include a FROM instruction.",
|
||||||
|
customNodeValidationInvalidCommand: "Command must contain non-empty arguments only.",
|
||||||
|
customNodeValidationInvalidInputMode: "Input contract is invalid.",
|
||||||
|
customNodeValidationInvalidOutputMode: "Output contract is invalid.",
|
||||||
|
customNodeValidationInvalidArtifactType: "Artifact type is invalid.",
|
||||||
|
customNodeValidationSourceCannotBeMultiInput:
|
||||||
|
"Source category nodes cannot declare multiple upstream asset sets.",
|
||||||
|
createStorageConnection: "Create Storage Connection",
|
||||||
|
storageProvider: "Storage Provider",
|
||||||
|
bucket: "Bucket",
|
||||||
|
endpoint: "Endpoint",
|
||||||
|
region: "Region",
|
||||||
|
basePath: "Base Path",
|
||||||
|
rootPath: "Root Path",
|
||||||
|
noStorageConnectionsYet: "No storage connections yet.",
|
||||||
|
datasetsTitle: "Datasets",
|
||||||
|
datasetsDescription:
|
||||||
|
"Create project datasets from source assets and bind them to a storage connection.",
|
||||||
|
datasetName: "Dataset Name",
|
||||||
|
datasetDescription: "Dataset Description",
|
||||||
|
sourceAsset: "Source Asset",
|
||||||
|
sourceAssets: "Source Assets",
|
||||||
|
storageConnection: "Storage Connection",
|
||||||
|
storagePathLabel: "Storage Path",
|
||||||
|
createDataset: "Create Dataset",
|
||||||
|
noDatasetsYet: "No datasets have been created yet.",
|
||||||
|
latestDatasetVersion: "Latest dataset version",
|
||||||
localPath: "Local Path",
|
localPath: "Local Path",
|
||||||
registerLocalPath: "Register Local Path",
|
registerLocalPath: "Register Local Path",
|
||||||
noAssetsYet: "No assets have been registered yet.",
|
noAssetsYet: "No assets have been registered yet.",
|
||||||
@ -178,24 +358,48 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
recommendedNodes: "Recommended nodes",
|
recommendedNodes: "Recommended nodes",
|
||||||
noProbeReportYet: "No probe report yet.",
|
noProbeReportYet: "No probe report yet.",
|
||||||
workflowsTitle: "Workflows",
|
workflowsTitle: "Workflows",
|
||||||
|
workflowTemplatesTitle: "Workflow Templates",
|
||||||
|
workflowTemplatesDescription:
|
||||||
|
"Start workflows from reusable templates or create a blank workflow directly in the project.",
|
||||||
createWorkflow: "Create Workflow",
|
createWorkflow: "Create Workflow",
|
||||||
|
createBlankWorkflow: "Create Blank Workflow",
|
||||||
|
createWorkflowFromTemplate: "Create From Template",
|
||||||
|
saveAsTemplate: "Save As Template",
|
||||||
|
templateName: "Template Name",
|
||||||
|
templateDescription: "Template Description",
|
||||||
|
templateSaved: "Saved template",
|
||||||
|
noWorkflowTemplatesYet: "No workflow templates yet.",
|
||||||
noWorkflowsYet: "No workflows yet.",
|
noWorkflowsYet: "No workflows yet.",
|
||||||
latestVersion: "Latest version",
|
latestVersion: "Latest version",
|
||||||
workflowEditor: "Workflow Editor",
|
workflowEditor: "Workflow Editor",
|
||||||
runAsset: "Run Asset",
|
workflowChecks: "Workflow Checks",
|
||||||
|
runChecks: "Run Checks",
|
||||||
|
checksPassed: "Checks passed",
|
||||||
|
checksBlocked: "Blocking issues found",
|
||||||
|
checkErrors: "{count} errors",
|
||||||
|
checkWarnings: "{count} warnings",
|
||||||
|
noChecksRunYet: "Run checks to validate the saved workflow version before execution.",
|
||||||
|
runInputType: "Run Input Type",
|
||||||
|
runInput: "Run Input",
|
||||||
|
assetInputKind: "Asset",
|
||||||
|
datasetInputKind: "Dataset",
|
||||||
saveWorkflowVersion: "Save Workflow Version",
|
saveWorkflowVersion: "Save Workflow Version",
|
||||||
triggerWorkflowRun: "Trigger Workflow Run",
|
triggerWorkflowRun: "Trigger Workflow Run",
|
||||||
reloadLatestSaved: "Reload Latest Saved",
|
reloadLatestSaved: "Reload Latest Saved",
|
||||||
openLatestRun: "Open Latest Run",
|
openLatestRun: "Open Latest Run",
|
||||||
selectAssetBeforeRun: "Select an asset before triggering a workflow run.",
|
selectInputBeforeRun: "Select an asset or dataset before triggering a workflow run.",
|
||||||
nodeLibrary: "Node Library",
|
nodeLibrary: "Node Library",
|
||||||
|
nodeLibraryHint: "Click to append or drag a node onto the canvas.",
|
||||||
canvas: "Canvas",
|
canvas: "Canvas",
|
||||||
canvasHint: "Drag nodes freely, connect handles, zoom, and pan.",
|
canvasHint: "Drag nodes freely, connect handles, zoom, and pan.",
|
||||||
|
dragNodeToCanvas: "Drop the node here to place it on the canvas.",
|
||||||
latestSavedVersions: "Latest saved versions",
|
latestSavedVersions: "Latest saved versions",
|
||||||
draftStatus: "Draft status",
|
draftStatus: "Draft status",
|
||||||
draftSynced: "synced",
|
draftSynced: "synced",
|
||||||
draftUnsaved: "unsaved changes",
|
draftUnsaved: "unsaved changes",
|
||||||
nodeConfiguration: "Node Configuration",
|
nodeConfiguration: "Node Configuration",
|
||||||
|
inputEnvelope: "Input Envelope",
|
||||||
|
outputEnvelope: "Output Envelope",
|
||||||
category: "Category",
|
category: "Category",
|
||||||
definition: "Definition",
|
definition: "Definition",
|
||||||
executorType: "Executor Type",
|
executorType: "Executor Type",
|
||||||
@ -207,11 +411,14 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
removeNode: "Remove Node",
|
removeNode: "Remove Node",
|
||||||
workflowCreatedName: "Delivery Normalize {count}",
|
workflowCreatedName: "Delivery Normalize {count}",
|
||||||
noAssetsAvailable: "No assets available",
|
noAssetsAvailable: "No assets available",
|
||||||
|
noDatasetsAvailable: "No datasets available",
|
||||||
runsTitle: "Runs",
|
runsTitle: "Runs",
|
||||||
runsDescription: "Recent workflow executions for the current project.",
|
runsDescription: "Recent workflow executions for the current project.",
|
||||||
noRunsYet: "No workflow runs yet.",
|
noRunsYet: "No workflow runs yet.",
|
||||||
createdAt: "Created at",
|
createdAt: "Created at",
|
||||||
|
inputSources: "Input sources",
|
||||||
inputAssets: "Input assets",
|
inputAssets: "Input assets",
|
||||||
|
inputDatasets: "Input datasets",
|
||||||
runDetail: "Run Detail",
|
runDetail: "Run Detail",
|
||||||
workflow: "Workflow",
|
workflow: "Workflow",
|
||||||
startedAt: "Started at",
|
startedAt: "Started at",
|
||||||
@ -222,6 +429,7 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
retryRun: "Retry Run",
|
retryRun: "Retry Run",
|
||||||
runGraph: "Run Graph",
|
runGraph: "Run Graph",
|
||||||
boundAssets: "Bound assets",
|
boundAssets: "Bound assets",
|
||||||
|
boundDatasets: "Bound datasets",
|
||||||
selectedTask: "Selected Task",
|
selectedTask: "Selected Task",
|
||||||
executor: "Executor",
|
executor: "Executor",
|
||||||
executorConfig: "Executor config",
|
executorConfig: "Executor config",
|
||||||
@ -246,8 +454,17 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
loadingArtifact: "Loading artifact...",
|
loadingArtifact: "Loading artifact...",
|
||||||
bootstrappingLocalWorkspace: "Bootstrapping local workspace...",
|
bootstrappingLocalWorkspace: "Bootstrapping local workspace...",
|
||||||
failedLoadAssets: "Failed to load assets",
|
failedLoadAssets: "Failed to load assets",
|
||||||
|
failedLoadStorageConnections: "Failed to load storage connections",
|
||||||
|
failedCreateStorageConnection: "Failed to create storage connection",
|
||||||
|
failedLoadDatasets: "Failed to load datasets",
|
||||||
|
failedCreateDataset: "Failed to create dataset",
|
||||||
|
failedLoadCustomNodes: "Failed to load custom nodes",
|
||||||
|
failedCreateCustomNode: "Failed to create custom node",
|
||||||
failedRegisterAsset: "Failed to register local asset",
|
failedRegisterAsset: "Failed to register local asset",
|
||||||
failedLoadWorkflows: "Failed to load workflows",
|
failedLoadWorkflows: "Failed to load workflows",
|
||||||
|
failedLoadTemplates: "Failed to load workflow templates",
|
||||||
|
failedCreateTemplate: "Failed to create workflow template",
|
||||||
|
failedCreateWorkflowFromTemplate: "Failed to create workflow from template",
|
||||||
failedLoadWorkflow: "Failed to load workflow",
|
failedLoadWorkflow: "Failed to load workflow",
|
||||||
failedLoadRuns: "Failed to load runs",
|
failedLoadRuns: "Failed to load runs",
|
||||||
failedLoadRunDetail: "Failed to load run detail",
|
failedLoadRunDetail: "Failed to load run detail",
|
||||||
@ -257,6 +474,8 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
failedRetryTask: "Failed to retry task",
|
failedRetryTask: "Failed to retry task",
|
||||||
failedLoadArtifact: "Failed to load artifact",
|
failedLoadArtifact: "Failed to load artifact",
|
||||||
failedBootstrap: "Failed to bootstrap local context",
|
failedBootstrap: "Failed to bootstrap local context",
|
||||||
|
failedLoadProjects: "Failed to load projects",
|
||||||
|
failedCreateProject: "Failed to create project",
|
||||||
validatedAssetCount: "validated {count} asset{suffix}",
|
validatedAssetCount: "validated {count} asset{suffix}",
|
||||||
loadedAssetCount: "loaded {count} bound asset{suffix}",
|
loadedAssetCount: "loaded {count} bound asset{suffix}",
|
||||||
success: "success",
|
success: "success",
|
||||||
@ -275,13 +494,22 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
viaExecutor: "{outcome} via {executor}",
|
viaExecutor: "{outcome} via {executor}",
|
||||||
assetCount: "assets {count}",
|
assetCount: "assets {count}",
|
||||||
artifactCount: "artifacts {count}",
|
artifactCount: "artifacts {count}",
|
||||||
|
invalidConnectionMissingEndpoint: "The connection is missing a valid source or target node.",
|
||||||
|
invalidConnectionSelf: "A node cannot connect to itself.",
|
||||||
|
invalidConnectionDuplicate: "This edge already exists.",
|
||||||
|
invalidConnectionSourceDisallowsOutgoing: "Export nodes cannot create outgoing connections in V1.",
|
||||||
|
invalidConnectionTargetDisallowsIncoming: "Source nodes cannot accept incoming connections.",
|
||||||
|
invalidConnectionTargetAlreadyHasIncoming: "This node already has an upstream connection in V1.",
|
||||||
|
invalidConnectionCycle: "This edge would create a cycle.",
|
||||||
},
|
},
|
||||||
zh: {
|
zh: {
|
||||||
workspace: "工作空间",
|
workspace: "工作空间",
|
||||||
project: "项目",
|
project: "项目",
|
||||||
runs: "运行",
|
runs: "运行",
|
||||||
localDev: "本地开发",
|
localDev: "本地开发",
|
||||||
|
navProjects: "项目",
|
||||||
navAssets: "数据资产",
|
navAssets: "数据资产",
|
||||||
|
navNodes: "节点",
|
||||||
navWorkflows: "工作流",
|
navWorkflows: "工作流",
|
||||||
navRuns: "运行记录",
|
navRuns: "运行记录",
|
||||||
navExplore: "查看",
|
navExplore: "查看",
|
||||||
@ -292,6 +520,70 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
chinese: "中文",
|
chinese: "中文",
|
||||||
assetsTitle: "数据资产",
|
assetsTitle: "数据资产",
|
||||||
assetsDescription: "注册本地目录、压缩包或数据集文件,并将其探测为受管资产元数据。",
|
assetsDescription: "注册本地目录、压缩包或数据集文件,并将其探测为受管资产元数据。",
|
||||||
|
projectsTitle: "项目",
|
||||||
|
projectsDescription: "创建项目、切换当前项目,并管理项目级资产、数据集、工作流与运行记录。",
|
||||||
|
projectNameLabel: "项目名称",
|
||||||
|
projectDescriptionLabel: "项目描述",
|
||||||
|
createProject: "创建项目",
|
||||||
|
noProjectsYet: "还没有项目。",
|
||||||
|
activeProject: "当前项目",
|
||||||
|
openProject: "打开项目",
|
||||||
|
storageConnectionsTitle: "存储连接",
|
||||||
|
storageConnectionsDescription: "定义项目数据集的存储位置,包括本地路径和对象存储提供方。",
|
||||||
|
nodesTitle: "自定义节点",
|
||||||
|
nodesDescription:
|
||||||
|
"通过镜像或自包含 Dockerfile 注册项目级 Docker 节点。容器必须读取 EmboFlow 输入路径,并把结果对象写入 EmboFlow 输出路径。",
|
||||||
|
createCustomNode: "创建自定义节点",
|
||||||
|
noCustomNodesYet: "当前还没有自定义节点。",
|
||||||
|
customNodeName: "节点名称",
|
||||||
|
customNodeDescription: "节点描述",
|
||||||
|
customNodeCategory: "节点分类",
|
||||||
|
customNodeSourceKind: "容器来源",
|
||||||
|
customNodeSourceImage: "Docker 镜像",
|
||||||
|
customNodeSourceDockerfile: "Dockerfile",
|
||||||
|
customNodeImage: "镜像",
|
||||||
|
customNodeDockerfile: "Dockerfile 内容",
|
||||||
|
customNodeDockerfileUpload: "上传 Dockerfile",
|
||||||
|
customNodeCommand: "启动命令(每行一个参数)",
|
||||||
|
customNodeInputMode: "输入契约",
|
||||||
|
customNodeOutputMode: "输出契约",
|
||||||
|
customNodeArtifactType: "产物类型",
|
||||||
|
customNodeSingleAssetSet: "单资产集",
|
||||||
|
customNodeMultiAssetSet: "多资产集",
|
||||||
|
customNodeReport: "仅报告",
|
||||||
|
customNodeAssetSet: "资产集",
|
||||||
|
customNodeAssetSetWithReport: "资产集加报告",
|
||||||
|
customNodeValidationNameRequired: "必须填写节点名称。",
|
||||||
|
customNodeValidationNameTooLong: "节点名称长度不能超过 80 个字符。",
|
||||||
|
customNodeValidationInvalidCategory: "节点分类无效。",
|
||||||
|
customNodeValidationInvalidSourceKind: "容器来源必须是 Docker 镜像或 Dockerfile。",
|
||||||
|
customNodeValidationImageRequired: "必须填写 Docker 镜像。",
|
||||||
|
customNodeValidationDockerfileRequired: "必须填写 Dockerfile 内容。",
|
||||||
|
customNodeValidationDockerfileMissingFrom: "Dockerfile 必须包含 FROM 指令。",
|
||||||
|
customNodeValidationInvalidCommand: "启动命令只能包含非空参数。",
|
||||||
|
customNodeValidationInvalidInputMode: "输入契约无效。",
|
||||||
|
customNodeValidationInvalidOutputMode: "输出契约无效。",
|
||||||
|
customNodeValidationInvalidArtifactType: "产物类型无效。",
|
||||||
|
customNodeValidationSourceCannotBeMultiInput: "Source 分类节点不能声明多资产集输入。",
|
||||||
|
createStorageConnection: "创建存储连接",
|
||||||
|
storageProvider: "存储提供方",
|
||||||
|
bucket: "Bucket",
|
||||||
|
endpoint: "Endpoint",
|
||||||
|
region: "Region",
|
||||||
|
basePath: "基础路径",
|
||||||
|
rootPath: "根路径",
|
||||||
|
noStorageConnectionsYet: "还没有存储连接。",
|
||||||
|
datasetsTitle: "数据集",
|
||||||
|
datasetsDescription: "从源资产创建项目数据集,并绑定到一个存储连接。",
|
||||||
|
datasetName: "数据集名称",
|
||||||
|
datasetDescription: "数据集描述",
|
||||||
|
sourceAsset: "源资产",
|
||||||
|
sourceAssets: "源资产",
|
||||||
|
storageConnection: "存储连接",
|
||||||
|
storagePathLabel: "存储路径",
|
||||||
|
createDataset: "创建数据集",
|
||||||
|
noDatasetsYet: "还没有创建任何数据集。",
|
||||||
|
latestDatasetVersion: "最新数据集版本",
|
||||||
localPath: "本地路径",
|
localPath: "本地路径",
|
||||||
registerLocalPath: "注册本地路径",
|
registerLocalPath: "注册本地路径",
|
||||||
noAssetsYet: "还没有注册任何资产。",
|
noAssetsYet: "还没有注册任何资产。",
|
||||||
@ -314,24 +606,47 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
recommendedNodes: "推荐节点",
|
recommendedNodes: "推荐节点",
|
||||||
noProbeReportYet: "还没有探测报告。",
|
noProbeReportYet: "还没有探测报告。",
|
||||||
workflowsTitle: "工作流",
|
workflowsTitle: "工作流",
|
||||||
|
workflowTemplatesTitle: "工作流模板",
|
||||||
|
workflowTemplatesDescription: "从可复用模板创建工作流,或者直接在项目里创建空白工作流。",
|
||||||
createWorkflow: "新建工作流",
|
createWorkflow: "新建工作流",
|
||||||
|
createBlankWorkflow: "创建空白工作流",
|
||||||
|
createWorkflowFromTemplate: "从模板创建工作流",
|
||||||
|
saveAsTemplate: "另存为模板",
|
||||||
|
templateName: "模板名称",
|
||||||
|
templateDescription: "模板描述",
|
||||||
|
templateSaved: "已保存模板",
|
||||||
|
noWorkflowTemplatesYet: "还没有工作流模板。",
|
||||||
noWorkflowsYet: "还没有工作流。",
|
noWorkflowsYet: "还没有工作流。",
|
||||||
latestVersion: "最新版本",
|
latestVersion: "最新版本",
|
||||||
workflowEditor: "工作流编辑器",
|
workflowEditor: "工作流编辑器",
|
||||||
runAsset: "运行资产",
|
workflowChecks: "工作流检查",
|
||||||
|
runChecks: "执行检查",
|
||||||
|
checksPassed: "检查通过",
|
||||||
|
checksBlocked: "发现阻塞问题",
|
||||||
|
checkErrors: "{count} 个错误",
|
||||||
|
checkWarnings: "{count} 个警告",
|
||||||
|
noChecksRunYet: "先执行检查,再触发运行已保存的工作流版本。",
|
||||||
|
runInputType: "运行输入类型",
|
||||||
|
runInput: "运行输入",
|
||||||
|
assetInputKind: "资产",
|
||||||
|
datasetInputKind: "数据集",
|
||||||
saveWorkflowVersion: "保存工作流版本",
|
saveWorkflowVersion: "保存工作流版本",
|
||||||
triggerWorkflowRun: "触发工作流运行",
|
triggerWorkflowRun: "触发工作流运行",
|
||||||
reloadLatestSaved: "重新加载最新保存版本",
|
reloadLatestSaved: "重新加载最新保存版本",
|
||||||
openLatestRun: "打开最新运行",
|
openLatestRun: "打开最新运行",
|
||||||
selectAssetBeforeRun: "触发工作流运行前请先选择资产。",
|
selectInputBeforeRun: "触发工作流运行前请先选择资产或数据集。",
|
||||||
nodeLibrary: "节点面板",
|
nodeLibrary: "节点面板",
|
||||||
|
nodeLibraryHint: "支持点击追加,也支持将节点拖入画布指定位置。",
|
||||||
canvas: "画布",
|
canvas: "画布",
|
||||||
canvasHint: "支持自由拖动节点、拖拽连线、缩放和平移。",
|
canvasHint: "支持自由拖动节点、拖拽连线、缩放和平移。",
|
||||||
|
dragNodeToCanvas: "将节点拖放到这里即可在画布中创建。",
|
||||||
latestSavedVersions: "最近保存版本",
|
latestSavedVersions: "最近保存版本",
|
||||||
draftStatus: "草稿状态",
|
draftStatus: "草稿状态",
|
||||||
draftSynced: "已同步",
|
draftSynced: "已同步",
|
||||||
draftUnsaved: "有未保存修改",
|
draftUnsaved: "有未保存修改",
|
||||||
nodeConfiguration: "节点配置",
|
nodeConfiguration: "节点配置",
|
||||||
|
inputEnvelope: "输入 Envelope",
|
||||||
|
outputEnvelope: "输出 Envelope",
|
||||||
category: "分类",
|
category: "分类",
|
||||||
definition: "定义",
|
definition: "定义",
|
||||||
executorType: "执行器类型",
|
executorType: "执行器类型",
|
||||||
@ -343,11 +658,14 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
removeNode: "删除节点",
|
removeNode: "删除节点",
|
||||||
workflowCreatedName: "交付标准化 {count}",
|
workflowCreatedName: "交付标准化 {count}",
|
||||||
noAssetsAvailable: "没有可用资产",
|
noAssetsAvailable: "没有可用资产",
|
||||||
|
noDatasetsAvailable: "没有可用数据集",
|
||||||
runsTitle: "运行记录",
|
runsTitle: "运行记录",
|
||||||
runsDescription: "当前项目最近的工作流执行记录。",
|
runsDescription: "当前项目最近的工作流执行记录。",
|
||||||
noRunsYet: "还没有工作流运行记录。",
|
noRunsYet: "还没有工作流运行记录。",
|
||||||
createdAt: "创建时间",
|
createdAt: "创建时间",
|
||||||
|
inputSources: "输入源",
|
||||||
inputAssets: "输入资产",
|
inputAssets: "输入资产",
|
||||||
|
inputDatasets: "输入数据集",
|
||||||
runDetail: "运行详情",
|
runDetail: "运行详情",
|
||||||
workflow: "工作流",
|
workflow: "工作流",
|
||||||
startedAt: "开始时间",
|
startedAt: "开始时间",
|
||||||
@ -358,6 +676,7 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
retryRun: "重试运行",
|
retryRun: "重试运行",
|
||||||
runGraph: "运行图",
|
runGraph: "运行图",
|
||||||
boundAssets: "绑定资产",
|
boundAssets: "绑定资产",
|
||||||
|
boundDatasets: "绑定数据集",
|
||||||
selectedTask: "当前任务",
|
selectedTask: "当前任务",
|
||||||
executor: "执行器",
|
executor: "执行器",
|
||||||
executorConfig: "执行器配置",
|
executorConfig: "执行器配置",
|
||||||
@ -382,8 +701,17 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
loadingArtifact: "正在加载产物...",
|
loadingArtifact: "正在加载产物...",
|
||||||
bootstrappingLocalWorkspace: "正在初始化本地工作空间...",
|
bootstrappingLocalWorkspace: "正在初始化本地工作空间...",
|
||||||
failedLoadAssets: "加载资产失败",
|
failedLoadAssets: "加载资产失败",
|
||||||
|
failedLoadStorageConnections: "加载存储连接失败",
|
||||||
|
failedCreateStorageConnection: "创建存储连接失败",
|
||||||
|
failedLoadDatasets: "加载数据集失败",
|
||||||
|
failedCreateDataset: "创建数据集失败",
|
||||||
|
failedLoadCustomNodes: "加载自定义节点失败",
|
||||||
|
failedCreateCustomNode: "创建自定义节点失败",
|
||||||
failedRegisterAsset: "注册本地资产失败",
|
failedRegisterAsset: "注册本地资产失败",
|
||||||
failedLoadWorkflows: "加载工作流失败",
|
failedLoadWorkflows: "加载工作流失败",
|
||||||
|
failedLoadTemplates: "加载工作流模板失败",
|
||||||
|
failedCreateTemplate: "创建工作流模板失败",
|
||||||
|
failedCreateWorkflowFromTemplate: "从模板创建工作流失败",
|
||||||
failedLoadWorkflow: "加载工作流失败",
|
failedLoadWorkflow: "加载工作流失败",
|
||||||
failedLoadRuns: "加载运行列表失败",
|
failedLoadRuns: "加载运行列表失败",
|
||||||
failedLoadRunDetail: "加载运行详情失败",
|
failedLoadRunDetail: "加载运行详情失败",
|
||||||
@ -393,6 +721,8 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
failedRetryTask: "重试任务失败",
|
failedRetryTask: "重试任务失败",
|
||||||
failedLoadArtifact: "加载产物失败",
|
failedLoadArtifact: "加载产物失败",
|
||||||
failedBootstrap: "初始化本地上下文失败",
|
failedBootstrap: "初始化本地上下文失败",
|
||||||
|
failedLoadProjects: "加载项目失败",
|
||||||
|
failedCreateProject: "创建项目失败",
|
||||||
validatedAssetCount: "已校验 {count} 个资产",
|
validatedAssetCount: "已校验 {count} 个资产",
|
||||||
loadedAssetCount: "已加载 {count} 个绑定资产",
|
loadedAssetCount: "已加载 {count} 个绑定资产",
|
||||||
success: "成功",
|
success: "成功",
|
||||||
@ -411,6 +741,13 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
|||||||
viaExecutor: "{outcome},执行器 {executor}",
|
viaExecutor: "{outcome},执行器 {executor}",
|
||||||
assetCount: "资产 {count}",
|
assetCount: "资产 {count}",
|
||||||
artifactCount: "产物 {count}",
|
artifactCount: "产物 {count}",
|
||||||
|
invalidConnectionMissingEndpoint: "该连线缺少有效的起点或终点节点。",
|
||||||
|
invalidConnectionSelf: "节点不能连接自己。",
|
||||||
|
invalidConnectionDuplicate: "这条连线已经存在。",
|
||||||
|
invalidConnectionSourceDisallowsOutgoing: "V1 中导出节点不允许继续向外连线。",
|
||||||
|
invalidConnectionTargetDisallowsIncoming: "数据源节点不允许接收入边。",
|
||||||
|
invalidConnectionTargetAlreadyHasIncoming: "V1 中该节点只能保留一条上游入边。",
|
||||||
|
invalidConnectionCycle: "这条连线会形成环路。",
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -435,6 +772,18 @@ const BUILTIN_NODE_TRANSLATIONS: Record<string, { en: { name: string; descriptio
|
|||||||
en: { name: "Validate Metadata", description: "Validate meta.json, intrinsics.json, and video_meta.json." },
|
en: { name: "Validate Metadata", description: "Validate meta.json, intrinsics.json, and video_meta.json." },
|
||||||
zh: { name: "校验元数据", description: "校验 meta.json、intrinsics.json 和 video_meta.json。" },
|
zh: { name: "校验元数据", description: "校验 meta.json、intrinsics.json 和 video_meta.json。" },
|
||||||
},
|
},
|
||||||
|
"union-assets": {
|
||||||
|
en: { name: "Union Assets", description: "Merge multiple upstream asset sets into one deduplicated asset set." },
|
||||||
|
zh: { name: "资产并集", description: "将多个上游资产集合合并为一个去重后的资产集合。" },
|
||||||
|
},
|
||||||
|
"intersect-assets": {
|
||||||
|
en: { name: "Intersect Assets", description: "Keep only the assets that exist in every upstream asset set." },
|
||||||
|
zh: { name: "资产交集", description: "只保留所有上游资产集合共同包含的资产。" },
|
||||||
|
},
|
||||||
|
"difference-assets": {
|
||||||
|
en: { name: "Difference Assets", description: "Subtract downstream asset sets from the first upstream asset set." },
|
||||||
|
zh: { name: "资产差集", description: "从第一个上游资产集合中减去后续上游资产集合。" },
|
||||||
|
},
|
||||||
"export-delivery-package": {
|
"export-delivery-package": {
|
||||||
en: { name: "Export Delivery Package", description: "Produce the final delivery package artifact for upload." },
|
en: { name: "Export Delivery Package", description: "Produce the final delivery package artifact for upload." },
|
||||||
zh: { name: "导出交付包", description: "生成最终交付包产物用于上传或交付。" },
|
zh: { name: "导出交付包", description: "生成最终交付包产物用于上传或交付。" },
|
||||||
|
|||||||
@ -3,6 +3,8 @@ import assert from "node:assert/strict";
|
|||||||
|
|
||||||
import {
|
import {
|
||||||
addNodeToDraft,
|
addNodeToDraft,
|
||||||
|
addNodeToDraftAtPosition,
|
||||||
|
canConnectNodesInDraft,
|
||||||
connectNodesInDraft,
|
connectNodesInDraft,
|
||||||
createDefaultWorkflowDraft,
|
createDefaultWorkflowDraft,
|
||||||
getNodeRuntimeConfig,
|
getNodeRuntimeConfig,
|
||||||
@ -54,6 +56,63 @@ test("add node appends a unique node id and a sequential edge by default", () =>
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("add node at explicit canvas position without auto-connecting it", () => {
|
||||||
|
const base = createDefaultWorkflowDraft();
|
||||||
|
const result = addNodeToDraftAtPosition(
|
||||||
|
base,
|
||||||
|
{
|
||||||
|
id: "export-delivery-package",
|
||||||
|
name: "Export Delivery Package",
|
||||||
|
category: "Export",
|
||||||
|
},
|
||||||
|
{ x: 888, y: 432 },
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(result.nodeId, "export-delivery-package-1");
|
||||||
|
assert.deepEqual(result.draft.visualGraph.nodePositions["export-delivery-package-1"], {
|
||||||
|
x: 888,
|
||||||
|
y: 432,
|
||||||
|
});
|
||||||
|
assert.equal(
|
||||||
|
result.draft.logicGraph.edges.some((edge) => edge.to === "export-delivery-package-1"),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("add custom docker node seeds runtime defaults from the node definition", () => {
|
||||||
|
const base = createDefaultWorkflowDraft();
|
||||||
|
const result = addNodeToDraft(base, {
|
||||||
|
id: "custom-merge-assets",
|
||||||
|
name: "Custom Merge Assets",
|
||||||
|
category: "Utility",
|
||||||
|
defaultExecutorType: "docker",
|
||||||
|
defaultExecutorConfig: {
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
allowsMultipleIncoming: true,
|
||||||
|
supportsCodeHook: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(result.nodeId, "custom-merge-assets-1");
|
||||||
|
assert.equal(
|
||||||
|
getNodeRuntimeConfig(result.draft, "custom-merge-assets-1")?.executorType,
|
||||||
|
"docker",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
(
|
||||||
|
getNodeRuntimeConfig(result.draft, "custom-merge-assets-1")?.executorConfig as {
|
||||||
|
contract?: { inputMode?: string };
|
||||||
|
}
|
||||||
|
)?.contract?.inputMode,
|
||||||
|
"multi_asset_set",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
test("remove node prunes attached edges and serialize emits workflow version payload", () => {
|
test("remove node prunes attached edges and serialize emits workflow version payload", () => {
|
||||||
const draft = workflowDraftFromVersion({
|
const draft = workflowDraftFromVersion({
|
||||||
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
visualGraph: { viewport: { x: 0, y: 0, zoom: 1 } },
|
||||||
@ -113,16 +172,197 @@ test("set per-node runtime config and keep it in the serialized workflow payload
|
|||||||
});
|
});
|
||||||
|
|
||||||
test("update node positions, connect nodes, and persist viewport in the workflow draft", () => {
|
test("update node positions, connect nodes, and persist viewport in the workflow draft", () => {
|
||||||
const draft = createDefaultWorkflowDraft();
|
const draft = addNodeToDraftAtPosition(
|
||||||
|
createDefaultWorkflowDraft(),
|
||||||
|
{
|
||||||
|
id: "export-delivery-package",
|
||||||
|
name: "Export Delivery Package",
|
||||||
|
category: "Export",
|
||||||
|
},
|
||||||
|
{ x: 920, y: 520 },
|
||||||
|
).draft;
|
||||||
const moved = setNodePosition(draft, "rename-folder", { x: 520, y: 240 });
|
const moved = setNodePosition(draft, "rename-folder", { x: 520, y: 240 });
|
||||||
const connected = connectNodesInDraft(moved, "source-asset", "validate-structure");
|
const connected = connectNodesInDraft(moved, "validate-structure", "export-delivery-package-1");
|
||||||
const next = setViewportInDraft(connected, { x: -120, y: 45, zoom: 1.35 });
|
const next = setViewportInDraft(connected, { x: -120, y: 45, zoom: 1.35 });
|
||||||
const payload = serializeWorkflowDraft(next);
|
const payload = serializeWorkflowDraft(next);
|
||||||
|
|
||||||
assert.deepEqual(payload.visualGraph.nodePositions["rename-folder"], { x: 520, y: 240 });
|
assert.deepEqual(payload.visualGraph.nodePositions["rename-folder"], { x: 520, y: 240 });
|
||||||
assert.deepEqual(payload.visualGraph.viewport, { x: -120, y: 45, zoom: 1.35 });
|
assert.deepEqual(payload.visualGraph.viewport, { x: -120, y: 45, zoom: 1.35 });
|
||||||
assert.deepEqual(payload.logicGraph.edges.at(-1), {
|
assert.deepEqual(payload.logicGraph.edges.at(-1), {
|
||||||
from: "source-asset",
|
from: "validate-structure",
|
||||||
to: "validate-structure",
|
to: "export-delivery-package-1",
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("reject invalid connections for self edges, duplicate edges, cycles, and multiple inbound edges", () => {
|
||||||
|
const draft = createDefaultWorkflowDraft();
|
||||||
|
|
||||||
|
assert.equal(canConnectNodesInDraft(draft, "source-asset", "source-asset").ok, false);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(draft, "source-asset", "rename-folder").reason,
|
||||||
|
"duplicate",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(draft, "validate-structure", "source-asset").reason,
|
||||||
|
"target_disallows_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(draft, "validate-structure", "rename-folder").reason,
|
||||||
|
"target_already_has_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(draft, "rename-folder", "source-asset").reason,
|
||||||
|
"target_disallows_incoming",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("reject connections that would create a cycle or start from an export node", () => {
|
||||||
|
const draft = createDefaultWorkflowDraft();
|
||||||
|
const withExport = addNodeToDraftAtPosition(
|
||||||
|
draft,
|
||||||
|
{
|
||||||
|
id: "export-delivery-package",
|
||||||
|
name: "Export Delivery Package",
|
||||||
|
category: "Export",
|
||||||
|
},
|
||||||
|
{ x: 900, y: 500 },
|
||||||
|
).draft;
|
||||||
|
const connected = connectNodesInDraft(withExport, "validate-structure", "export-delivery-package-1");
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "export-delivery-package-1", "rename-folder").reason,
|
||||||
|
"source_disallows_outgoing",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "validate-structure", "source-asset").reason,
|
||||||
|
"target_disallows_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "rename-folder", "source-asset").reason,
|
||||||
|
"target_disallows_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "export-delivery-package-1", "validate-structure").reason,
|
||||||
|
"source_disallows_outgoing",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "validate-structure", "source-asset").ok,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "validate-structure", "rename-folder").reason,
|
||||||
|
"target_already_has_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "rename-folder", "source-asset").ok,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(connected, "source-asset", "validate-structure").reason,
|
||||||
|
"target_already_has_incoming",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("reject connections that would form a back edge cycle when target accepts inbound edges", () => {
|
||||||
|
const draft = workflowDraftFromVersion({
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-asset", type: "source" },
|
||||||
|
{ id: "rename-folder", type: "transform" },
|
||||||
|
{ id: "validate-structure", type: "inspect" },
|
||||||
|
{ id: "validate-metadata-1", type: "inspect" },
|
||||||
|
],
|
||||||
|
edges: [
|
||||||
|
{ from: "source-asset", to: "rename-folder" },
|
||||||
|
{ from: "rename-folder", to: "validate-structure" },
|
||||||
|
{ from: "validate-structure", to: "validate-metadata-1" },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(draft, "validate-metadata-1", "rename-folder").reason,
|
||||||
|
"target_already_has_incoming",
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(removeNodeFromDraft(draft, "source-asset"), "validate-metadata-1", "rename-folder")
|
||||||
|
.reason,
|
||||||
|
"cycle",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("allow multi-inbound utility set nodes while still blocking cycles", () => {
|
||||||
|
const draft = workflowDraftFromVersion({
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-a", type: "source" },
|
||||||
|
{ id: "source-b", type: "source" },
|
||||||
|
{ id: "intersect-assets-1", type: "utility" },
|
||||||
|
],
|
||||||
|
edges: [],
|
||||||
|
},
|
||||||
|
runtimeGraph: {
|
||||||
|
nodeBindings: {
|
||||||
|
"source-a": "source-asset",
|
||||||
|
"source-b": "source-asset",
|
||||||
|
"intersect-assets-1": "intersect-assets",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const withFirst = connectNodesInDraft(draft, "source-a", "intersect-assets-1");
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(withFirst, "source-b", "intersect-assets-1").ok,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(withFirst, "intersect-assets-1", "source-a").reason,
|
||||||
|
"target_disallows_incoming",
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("allow multi-inbound custom nodes when their seeded runtime contract expects multiple asset sets", () => {
|
||||||
|
const base = workflowDraftFromVersion({
|
||||||
|
logicGraph: {
|
||||||
|
nodes: [
|
||||||
|
{ id: "source-a", type: "source" },
|
||||||
|
{ id: "source-b", type: "source" },
|
||||||
|
],
|
||||||
|
edges: [],
|
||||||
|
},
|
||||||
|
runtimeGraph: {
|
||||||
|
nodeBindings: {
|
||||||
|
"source-a": "source-asset",
|
||||||
|
"source-b": "source-asset",
|
||||||
|
},
|
||||||
|
nodeConfigs: {},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
const appended = addNodeToDraftAtPosition(
|
||||||
|
base,
|
||||||
|
{
|
||||||
|
id: "custom-merge-assets",
|
||||||
|
name: "Custom Merge Assets",
|
||||||
|
category: "Utility",
|
||||||
|
defaultExecutorType: "docker",
|
||||||
|
defaultExecutorConfig: {
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
contract: {
|
||||||
|
inputMode: "multi_asset_set",
|
||||||
|
outputMode: "asset_set",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
allowsMultipleIncoming: true,
|
||||||
|
},
|
||||||
|
{ x: 540, y: 200 },
|
||||||
|
);
|
||||||
|
|
||||||
|
const withFirst = connectNodesInDraft(appended.draft, "source-a", appended.nodeId);
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
canConnectNodesInDraft(withFirst, "source-b", appended.nodeId).ok,
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|||||||
@ -26,6 +26,10 @@ export type WorkflowNodeDefinitionSummary = {
|
|||||||
id: string;
|
id: string;
|
||||||
name: string;
|
name: string;
|
||||||
category?: string;
|
category?: string;
|
||||||
|
defaultExecutorType?: "python" | "docker" | "http";
|
||||||
|
defaultExecutorConfig?: Record<string, unknown>;
|
||||||
|
allowsMultipleIncoming?: boolean;
|
||||||
|
supportsCodeHook?: boolean;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type WorkflowCodeHookSpec = {
|
export type WorkflowCodeHookSpec = {
|
||||||
@ -59,12 +63,57 @@ export type WorkflowDraft = {
|
|||||||
|
|
||||||
type WorkflowVersionLike = Partial<WorkflowDraft>;
|
type WorkflowVersionLike = Partial<WorkflowDraft>;
|
||||||
|
|
||||||
|
export type WorkflowConnectionValidationReason =
|
||||||
|
| "missing_source"
|
||||||
|
| "missing_target"
|
||||||
|
| "self"
|
||||||
|
| "duplicate"
|
||||||
|
| "source_disallows_outgoing"
|
||||||
|
| "target_disallows_incoming"
|
||||||
|
| "target_already_has_incoming"
|
||||||
|
| "cycle";
|
||||||
|
|
||||||
|
export type WorkflowConnectionValidationResult =
|
||||||
|
| { ok: true }
|
||||||
|
| { ok: false; reason: WorkflowConnectionValidationReason };
|
||||||
|
|
||||||
const DEFAULT_VIEWPORT: WorkflowViewport = { x: 0, y: 0, zoom: 1 };
|
const DEFAULT_VIEWPORT: WorkflowViewport = { x: 0, y: 0, zoom: 1 };
|
||||||
const DEFAULT_NODE_LAYOUT: Record<string, WorkflowPoint> = {
|
const DEFAULT_NODE_LAYOUT: Record<string, WorkflowPoint> = {
|
||||||
"source-asset": { x: 120, y: 120 },
|
"source-asset": { x: 120, y: 120 },
|
||||||
"rename-folder": { x: 430, y: 280 },
|
"rename-folder": { x: 430, y: 280 },
|
||||||
"validate-structure": { x: 760, y: 450 },
|
"validate-structure": { x: 760, y: 450 },
|
||||||
};
|
};
|
||||||
|
const MULTI_INPUT_NODE_DEFINITION_IDS = new Set(["union-assets", "intersect-assets", "difference-assets"]);
|
||||||
|
|
||||||
|
function cloneRuntimeConfig(config: WorkflowNodeRuntimeConfig) {
|
||||||
|
return {
|
||||||
|
...config,
|
||||||
|
executorConfig: config.executorConfig ? { ...config.executorConfig } : undefined,
|
||||||
|
codeHookSpec: config.codeHookSpec ? { ...config.codeHookSpec } : undefined,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createDefaultRuntimeConfigForDefinition(
|
||||||
|
definition: WorkflowNodeDefinitionSummary,
|
||||||
|
): WorkflowNodeRuntimeConfig | undefined {
|
||||||
|
const executorType = definition.defaultExecutorType;
|
||||||
|
const executorConfig = definition.defaultExecutorConfig ? { ...definition.defaultExecutorConfig } : undefined;
|
||||||
|
|
||||||
|
if (!executorType && !executorConfig) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
const config: WorkflowNodeRuntimeConfig = {
|
||||||
|
definitionId: definition.id,
|
||||||
|
};
|
||||||
|
if (executorType) {
|
||||||
|
config.executorType = executorType;
|
||||||
|
}
|
||||||
|
if (executorConfig && Object.keys(executorConfig).length > 0) {
|
||||||
|
config.executorConfig = executorConfig;
|
||||||
|
}
|
||||||
|
return config;
|
||||||
|
}
|
||||||
|
|
||||||
function createDefaultNodePosition(index: number): WorkflowPoint {
|
function createDefaultNodePosition(index: number): WorkflowPoint {
|
||||||
const column = index % 3;
|
const column = index % 3;
|
||||||
@ -107,11 +156,7 @@ function cloneDraft(draft: WorkflowDraft): WorkflowDraft {
|
|||||||
nodeConfigs: Object.fromEntries(
|
nodeConfigs: Object.fromEntries(
|
||||||
Object.entries(draft.runtimeGraph.nodeConfigs ?? {}).map(([nodeId, config]) => [
|
Object.entries(draft.runtimeGraph.nodeConfigs ?? {}).map(([nodeId, config]) => [
|
||||||
nodeId,
|
nodeId,
|
||||||
{
|
cloneRuntimeConfig(config),
|
||||||
...config,
|
|
||||||
executorConfig: config.executorConfig ? { ...config.executorConfig } : undefined,
|
|
||||||
codeHookSpec: config.codeHookSpec ? { ...config.codeHookSpec } : undefined,
|
|
||||||
},
|
|
||||||
]),
|
]),
|
||||||
),
|
),
|
||||||
},
|
},
|
||||||
@ -221,6 +266,30 @@ export function workflowDraftFromVersion(version?: WorkflowVersionLike | null):
|
|||||||
export function addNodeToDraft(
|
export function addNodeToDraft(
|
||||||
draft: WorkflowDraft,
|
draft: WorkflowDraft,
|
||||||
definition: WorkflowNodeDefinitionSummary,
|
definition: WorkflowNodeDefinitionSummary,
|
||||||
|
): { draft: WorkflowDraft; nodeId: string } {
|
||||||
|
return addNodeToDraftInternal(draft, definition, {
|
||||||
|
connectFromPrevious: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function addNodeToDraftAtPosition(
|
||||||
|
draft: WorkflowDraft,
|
||||||
|
definition: WorkflowNodeDefinitionSummary,
|
||||||
|
position: WorkflowPoint,
|
||||||
|
): { draft: WorkflowDraft; nodeId: string } {
|
||||||
|
return addNodeToDraftInternal(draft, definition, {
|
||||||
|
connectFromPrevious: false,
|
||||||
|
position,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function addNodeToDraftInternal(
|
||||||
|
draft: WorkflowDraft,
|
||||||
|
definition: WorkflowNodeDefinitionSummary,
|
||||||
|
options: {
|
||||||
|
connectFromPrevious: boolean;
|
||||||
|
position?: WorkflowPoint;
|
||||||
|
},
|
||||||
): { draft: WorkflowDraft; nodeId: string } {
|
): { draft: WorkflowDraft; nodeId: string } {
|
||||||
const next = cloneDraft(draft);
|
const next = cloneDraft(draft);
|
||||||
let suffix = 1;
|
let suffix = 1;
|
||||||
@ -237,13 +306,18 @@ export function addNodeToDraft(
|
|||||||
};
|
};
|
||||||
const previousNode = next.logicGraph.nodes.at(-1);
|
const previousNode = next.logicGraph.nodes.at(-1);
|
||||||
next.logicGraph.nodes.push(node);
|
next.logicGraph.nodes.push(node);
|
||||||
if (previousNode) {
|
if (options.connectFromPrevious && previousNode) {
|
||||||
next.logicGraph.edges.push({ from: previousNode.id, to: nodeId });
|
next.logicGraph.edges.push({ from: previousNode.id, to: nodeId });
|
||||||
}
|
}
|
||||||
next.runtimeGraph.nodeBindings ??= {};
|
next.runtimeGraph.nodeBindings ??= {};
|
||||||
next.runtimeGraph.nodeBindings[nodeId] = definition.id;
|
next.runtimeGraph.nodeBindings[nodeId] = definition.id;
|
||||||
next.runtimeGraph.nodeConfigs ??= {};
|
next.runtimeGraph.nodeConfigs ??= {};
|
||||||
next.visualGraph.nodePositions[nodeId] = createDefaultNodePosition(next.logicGraph.nodes.length - 1);
|
const defaultRuntimeConfig = createDefaultRuntimeConfigForDefinition(definition);
|
||||||
|
if (defaultRuntimeConfig) {
|
||||||
|
next.runtimeGraph.nodeConfigs[nodeId] = defaultRuntimeConfig;
|
||||||
|
}
|
||||||
|
next.visualGraph.nodePositions[nodeId] =
|
||||||
|
options.position ?? createDefaultNodePosition(next.logicGraph.nodes.length - 1);
|
||||||
|
|
||||||
return { draft: next, nodeId };
|
return { draft: next, nodeId };
|
||||||
}
|
}
|
||||||
@ -293,21 +367,115 @@ export function setNodePosition(draft: WorkflowDraft, nodeId: string, position:
|
|||||||
return next;
|
return next;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function findNode(draft: WorkflowDraft, nodeId: string) {
|
||||||
|
return draft.logicGraph.nodes.find((node) => node.id === nodeId) ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function nodeDisallowsOutgoing(node: WorkflowLogicNode) {
|
||||||
|
return node.type === "export";
|
||||||
|
}
|
||||||
|
|
||||||
|
function nodeDisallowsIncoming(node: WorkflowLogicNode) {
|
||||||
|
return node.type === "source";
|
||||||
|
}
|
||||||
|
|
||||||
|
function allowsMultipleIncoming(draft: WorkflowDraft, nodeId: string) {
|
||||||
|
const definitionId = resolveDefinitionIdForNode(draft, nodeId);
|
||||||
|
if (MULTI_INPUT_NODE_DEFINITION_IDS.has(definitionId)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const contract =
|
||||||
|
draft.runtimeGraph.nodeConfigs?.[nodeId]?.executorConfig &&
|
||||||
|
typeof draft.runtimeGraph.nodeConfigs[nodeId]?.executorConfig === "object" &&
|
||||||
|
!Array.isArray(draft.runtimeGraph.nodeConfigs[nodeId]?.executorConfig)
|
||||||
|
? (draft.runtimeGraph.nodeConfigs[nodeId]?.executorConfig as { contract?: { inputMode?: string } }).contract
|
||||||
|
: undefined;
|
||||||
|
return contract?.inputMode === "multi_asset_set";
|
||||||
|
}
|
||||||
|
|
||||||
|
function wouldCreateCycle(draft: WorkflowDraft, sourceNodeId: string, targetNodeId: string) {
|
||||||
|
const adjacency = new Map<string, string[]>();
|
||||||
|
for (const edge of draft.logicGraph.edges) {
|
||||||
|
const current = adjacency.get(edge.from) ?? [];
|
||||||
|
current.push(edge.to);
|
||||||
|
adjacency.set(edge.from, current);
|
||||||
|
}
|
||||||
|
|
||||||
|
const stack = [targetNodeId];
|
||||||
|
const visited = new Set<string>();
|
||||||
|
while (stack.length > 0) {
|
||||||
|
const currentNodeId = stack.pop();
|
||||||
|
if (!currentNodeId || visited.has(currentNodeId)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (currentNodeId === sourceNodeId) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
visited.add(currentNodeId);
|
||||||
|
for (const nextNodeId of adjacency.get(currentNodeId) ?? []) {
|
||||||
|
stack.push(nextNodeId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function canConnectNodesInDraft(
|
||||||
|
draft: WorkflowDraft,
|
||||||
|
sourceNodeId: string | null | undefined,
|
||||||
|
targetNodeId: string | null | undefined,
|
||||||
|
): WorkflowConnectionValidationResult {
|
||||||
|
if (!sourceNodeId) {
|
||||||
|
return { ok: false, reason: "missing_source" };
|
||||||
|
}
|
||||||
|
if (!targetNodeId) {
|
||||||
|
return { ok: false, reason: "missing_target" };
|
||||||
|
}
|
||||||
|
if (sourceNodeId === targetNodeId) {
|
||||||
|
return { ok: false, reason: "self" };
|
||||||
|
}
|
||||||
|
|
||||||
|
const sourceNode = findNode(draft, sourceNodeId);
|
||||||
|
if (!sourceNode) {
|
||||||
|
return { ok: false, reason: "missing_source" };
|
||||||
|
}
|
||||||
|
|
||||||
|
const targetNode = findNode(draft, targetNodeId);
|
||||||
|
if (!targetNode) {
|
||||||
|
return { ok: false, reason: "missing_target" };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (draft.logicGraph.edges.some((edge) => edge.from === sourceNodeId && edge.to === targetNodeId)) {
|
||||||
|
return { ok: false, reason: "duplicate" };
|
||||||
|
}
|
||||||
|
if (nodeDisallowsOutgoing(sourceNode)) {
|
||||||
|
return { ok: false, reason: "source_disallows_outgoing" };
|
||||||
|
}
|
||||||
|
if (nodeDisallowsIncoming(targetNode)) {
|
||||||
|
return { ok: false, reason: "target_disallows_incoming" };
|
||||||
|
}
|
||||||
|
if (!allowsMultipleIncoming(draft, targetNodeId) && draft.logicGraph.edges.some((edge) => edge.to === targetNodeId)) {
|
||||||
|
return { ok: false, reason: "target_already_has_incoming" };
|
||||||
|
}
|
||||||
|
if (wouldCreateCycle(draft, sourceNodeId, targetNodeId)) {
|
||||||
|
return { ok: false, reason: "cycle" };
|
||||||
|
}
|
||||||
|
|
||||||
|
return { ok: true };
|
||||||
|
}
|
||||||
|
|
||||||
export function connectNodesInDraft(
|
export function connectNodesInDraft(
|
||||||
draft: WorkflowDraft,
|
draft: WorkflowDraft,
|
||||||
sourceNodeId: string | null | undefined,
|
sourceNodeId: string | null | undefined,
|
||||||
targetNodeId: string | null | undefined,
|
targetNodeId: string | null | undefined,
|
||||||
): WorkflowDraft {
|
): WorkflowDraft {
|
||||||
if (!sourceNodeId || !targetNodeId || sourceNodeId === targetNodeId) {
|
const validation = canConnectNodesInDraft(draft, sourceNodeId, targetNodeId);
|
||||||
|
if (!validation.ok || !sourceNodeId || !targetNodeId) {
|
||||||
return draft;
|
return draft;
|
||||||
}
|
}
|
||||||
const next = cloneDraft(draft);
|
const next = cloneDraft(draft);
|
||||||
const exists = next.logicGraph.edges.some(
|
|
||||||
(edge) => edge.from === sourceNodeId && edge.to === targetNodeId,
|
|
||||||
);
|
|
||||||
if (!exists) {
|
|
||||||
next.logicGraph.edges.push({ from: sourceNodeId, to: targetNodeId });
|
next.logicGraph.edges.push({ from: sourceNodeId, to: targetNodeId });
|
||||||
}
|
|
||||||
return next;
|
return next;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -22,6 +22,7 @@ a {
|
|||||||
|
|
||||||
button,
|
button,
|
||||||
input,
|
input,
|
||||||
|
select,
|
||||||
textarea {
|
textarea {
|
||||||
font: inherit;
|
font: inherit;
|
||||||
}
|
}
|
||||||
@ -66,6 +67,14 @@ textarea {
|
|||||||
min-width: 140px;
|
min-width: 140px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.app-header__select {
|
||||||
|
min-width: 220px;
|
||||||
|
border: 1px solid #cbd5e1;
|
||||||
|
border-radius: 10px;
|
||||||
|
padding: 10px 12px;
|
||||||
|
background: #f8fafc;
|
||||||
|
}
|
||||||
|
|
||||||
.app-header__label {
|
.app-header__label {
|
||||||
font-size: 12px;
|
font-size: 12px;
|
||||||
color: #6b7280;
|
color: #6b7280;
|
||||||
@ -178,6 +187,7 @@ textarea {
|
|||||||
}
|
}
|
||||||
|
|
||||||
.field-grid input,
|
.field-grid input,
|
||||||
|
.field-grid select,
|
||||||
.field-grid textarea {
|
.field-grid textarea {
|
||||||
border: 1px solid #cbd5e1;
|
border: 1px solid #cbd5e1;
|
||||||
border-radius: 10px;
|
border-radius: 10px;
|
||||||
@ -228,11 +238,17 @@ textarea {
|
|||||||
color: #6b7280;
|
color: #6b7280;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.asset-card[data-active="true"] {
|
||||||
|
border-color: #111827;
|
||||||
|
box-shadow: 0 0 0 1px rgba(17, 24, 39, 0.08);
|
||||||
|
}
|
||||||
|
|
||||||
.workflow-canvas-panel {
|
.workflow-canvas-panel {
|
||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
}
|
}
|
||||||
|
|
||||||
.workflow-canvas-shell {
|
.workflow-canvas-shell {
|
||||||
|
position: relative;
|
||||||
height: 680px;
|
height: 680px;
|
||||||
margin-top: 12px;
|
margin-top: 12px;
|
||||||
border: 1px solid #d4d4d8;
|
border: 1px solid #d4d4d8;
|
||||||
@ -243,6 +259,26 @@ textarea {
|
|||||||
linear-gradient(180deg, #ffffff 0%, #f4f6fb 100%);
|
linear-gradient(180deg, #ffffff 0%, #f4f6fb 100%);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.workflow-canvas-shell[data-drop-active="true"] {
|
||||||
|
border-color: #0284c7;
|
||||||
|
box-shadow: inset 0 0 0 2px rgba(2, 132, 199, 0.15);
|
||||||
|
}
|
||||||
|
|
||||||
|
.workflow-canvas-drop-hint {
|
||||||
|
position: absolute;
|
||||||
|
top: 16px;
|
||||||
|
left: 50%;
|
||||||
|
transform: translateX(-50%);
|
||||||
|
z-index: 5;
|
||||||
|
border-radius: 999px;
|
||||||
|
padding: 8px 14px;
|
||||||
|
background: rgba(2, 132, 199, 0.92);
|
||||||
|
color: #f8fafc;
|
||||||
|
font-size: 13px;
|
||||||
|
font-weight: 600;
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
.workflow-canvas-footer {
|
.workflow-canvas-footer {
|
||||||
display: flex;
|
display: flex;
|
||||||
justify-content: space-between;
|
justify-content: space-between;
|
||||||
@ -252,6 +288,35 @@ textarea {
|
|||||||
font-size: 13px;
|
font-size: 13px;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.template-save-section {
|
||||||
|
margin-top: 24px;
|
||||||
|
padding-top: 16px;
|
||||||
|
border-top: 1px solid #e5e7eb;
|
||||||
|
}
|
||||||
|
|
||||||
|
.template-save-section h3 {
|
||||||
|
margin: 0 0 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.workflow-canvas-feedback {
|
||||||
|
margin-left: auto;
|
||||||
|
padding: 6px 10px;
|
||||||
|
border-radius: 999px;
|
||||||
|
background: #fee2e2;
|
||||||
|
color: #991b1b;
|
||||||
|
font-size: 12px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.workflow-node-library-item {
|
||||||
|
width: 100%;
|
||||||
|
text-align: left;
|
||||||
|
cursor: grab;
|
||||||
|
}
|
||||||
|
|
||||||
|
.workflow-node-library-item:active {
|
||||||
|
cursor: grabbing;
|
||||||
|
}
|
||||||
|
|
||||||
.workflow-flow-node {
|
.workflow-flow-node {
|
||||||
min-width: 220px;
|
min-width: 220px;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
|
|||||||
@ -1,5 +1,10 @@
|
|||||||
export type ExecutorType = "python" | "docker" | "http";
|
export type ExecutorType = "python" | "docker" | "http";
|
||||||
export type ArtifactType = "json" | "directory" | "video";
|
export type ArtifactType = "json" | "directory" | "video";
|
||||||
|
export type WorkflowInputBindingKind = "asset" | "dataset";
|
||||||
|
export type WorkflowInputBinding = {
|
||||||
|
kind: WorkflowInputBindingKind;
|
||||||
|
id: string;
|
||||||
|
};
|
||||||
export type TaskStatus = "pending" | "queued" | "running" | "success" | "failed" | "cancelled";
|
export type TaskStatus = "pending" | "queued" | "running" | "success" | "failed" | "cancelled";
|
||||||
export type TaskStatusCounts = {
|
export type TaskStatusCounts = {
|
||||||
pending: number;
|
pending: number;
|
||||||
@ -72,7 +77,9 @@ export type TaskRecord = {
|
|||||||
artifactTitle?: string;
|
artifactTitle?: string;
|
||||||
status: TaskStatus;
|
status: TaskStatus;
|
||||||
attempt?: number;
|
attempt?: number;
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
assetIds?: string[];
|
assetIds?: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
upstreamNodeIds?: string[];
|
upstreamNodeIds?: string[];
|
||||||
outputArtifactIds?: string[];
|
outputArtifactIds?: string[];
|
||||||
errorMessage?: string;
|
errorMessage?: string;
|
||||||
@ -95,12 +102,34 @@ export type ExecutionAsset = {
|
|||||||
summary?: Record<string, unknown>;
|
summary?: Record<string, unknown>;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type ExecutionDataset = {
|
||||||
|
id: string;
|
||||||
|
name: string;
|
||||||
|
storagePath?: string;
|
||||||
|
sourceAssetIds: string[];
|
||||||
|
latestVersionId?: string;
|
||||||
|
latestVersionNumber?: number;
|
||||||
|
summary?: Record<string, unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type UpstreamExecutionResult = {
|
||||||
|
taskId: string;
|
||||||
|
nodeId: string;
|
||||||
|
nodeDefinitionId?: string;
|
||||||
|
assetIds: string[];
|
||||||
|
result?: Record<string, unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
export type ExecutionContext = {
|
export type ExecutionContext = {
|
||||||
taskId: string;
|
taskId: string;
|
||||||
workflowRunId?: string;
|
workflowRunId?: string;
|
||||||
workflowVersionId?: string;
|
workflowVersionId?: string;
|
||||||
nodeId: string;
|
nodeId: string;
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
assetIds?: string[];
|
assetIds?: string[];
|
||||||
assets?: ExecutionAsset[];
|
assets?: ExecutionAsset[];
|
||||||
|
datasetIds?: string[];
|
||||||
|
datasets?: ExecutionDataset[];
|
||||||
nodeDefinitionId?: string;
|
nodeDefinitionId?: string;
|
||||||
|
upstreamResults?: UpstreamExecutionResult[];
|
||||||
};
|
};
|
||||||
|
|||||||
@ -1,9 +1,11 @@
|
|||||||
|
import { createHash } from "node:crypto";
|
||||||
import { spawn } from "node:child_process";
|
import { spawn } from "node:child_process";
|
||||||
import { mkdtemp, readFile, rm, stat, writeFile } from "node:fs/promises";
|
import { mkdtemp, readFile, rm, stat, writeFile } from "node:fs/promises";
|
||||||
import os from "node:os";
|
import os from "node:os";
|
||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
|
|
||||||
import type {
|
import type {
|
||||||
|
ExecutionAsset,
|
||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
ExecutorExecutionResult,
|
ExecutorExecutionResult,
|
||||||
TaskRecord,
|
TaskRecord,
|
||||||
@ -45,18 +47,288 @@ function parseDockerResult(payload: unknown) {
|
|||||||
return payload;
|
return payload;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function runProcess(command: string, args: string[]) {
|
||||||
|
const child = spawn(command, args, {
|
||||||
|
stdio: ["ignore", "pipe", "pipe"],
|
||||||
|
});
|
||||||
|
|
||||||
|
let stdout = "";
|
||||||
|
let stderr = "";
|
||||||
|
child.stdout.on("data", (chunk) => {
|
||||||
|
stdout += String(chunk);
|
||||||
|
});
|
||||||
|
child.stderr.on("data", (chunk) => {
|
||||||
|
stderr += String(chunk);
|
||||||
|
});
|
||||||
|
|
||||||
|
const exitCode = await new Promise<number>((resolve, reject) => {
|
||||||
|
child.on("error", reject);
|
||||||
|
child.on("close", (code) => resolve(code ?? 1));
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
exitCode,
|
||||||
|
stdoutLines: splitOutputLines(stdout),
|
||||||
|
stderrLines: splitOutputLines(stderr),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function ensureImageFromDockerfile(dockerfileContent: string, requestedTag?: string) {
|
||||||
|
const imageTag = requestedTag && requestedTag.trim().length > 0
|
||||||
|
? requestedTag.trim()
|
||||||
|
: `emboflow/custom-node:${createHash("sha256").update(dockerfileContent).digest("hex").slice(0, 16)}`;
|
||||||
|
const inspected = await runProcess("docker", ["image", "inspect", imageTag]);
|
||||||
|
if (inspected.exitCode === 0) {
|
||||||
|
return imageTag;
|
||||||
|
}
|
||||||
|
|
||||||
|
const buildDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-custom-node-build-"));
|
||||||
|
try {
|
||||||
|
await writeFile(path.join(buildDir, "Dockerfile"), dockerfileContent);
|
||||||
|
const build = await runProcess("docker", ["build", "-t", imageTag, buildDir]);
|
||||||
|
if (build.exitCode !== 0) {
|
||||||
|
throw Object.assign(new Error(`docker build failed for ${imageTag}`), {
|
||||||
|
stdoutLines: build.stdoutLines,
|
||||||
|
stderrLines: build.stderrLines,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return imageTag;
|
||||||
|
} finally {
|
||||||
|
await rm(buildDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function validateCustomNodeResult(task: TaskRecord, result: unknown) {
|
||||||
|
const contract =
|
||||||
|
task.executorConfig?.contract && typeof task.executorConfig.contract === "object" && !Array.isArray(task.executorConfig.contract)
|
||||||
|
? task.executorConfig.contract as { outputMode?: string }
|
||||||
|
: undefined;
|
||||||
|
if (!contract) {
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
if (!result || typeof result !== "object" || Array.isArray(result)) {
|
||||||
|
throw new Error("custom docker nodes must write an object result");
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
(contract.outputMode === "asset_set" || contract.outputMode === "asset_set_with_report") &&
|
||||||
|
!Array.isArray((result as { assetIds?: unknown }).assetIds)
|
||||||
|
) {
|
||||||
|
throw new Error("custom asset-set nodes must return result.assetIds as a string array");
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildContainerAssetContext(workdir: string, assets: ExecutionAsset[] = []) {
|
||||||
|
const volumeArgs: string[] = [];
|
||||||
|
const containerAssets = assets.map((asset) => {
|
||||||
|
if (!asset.sourcePath || !path.isAbsolute(asset.sourcePath)) {
|
||||||
|
return asset;
|
||||||
|
}
|
||||||
|
|
||||||
|
const containerSourcePath = path.posix.join(workdir, "mounted-assets", asset.id);
|
||||||
|
volumeArgs.push("--volume", `${asset.sourcePath}:${containerSourcePath}:ro`);
|
||||||
|
return {
|
||||||
|
...asset,
|
||||||
|
sourcePath: containerSourcePath,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
assets: containerAssets,
|
||||||
|
volumeArgs,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createDockerRunner() {
|
||||||
|
return [
|
||||||
|
"import contextlib",
|
||||||
|
"import io",
|
||||||
|
"import json",
|
||||||
|
"import pathlib",
|
||||||
|
"import sys",
|
||||||
|
"",
|
||||||
|
"REQUIRED_DELIVERY_FILES = ['meta.json', 'intrinsics.json', 'video_meta.json']",
|
||||||
|
"",
|
||||||
|
"def dedupe_asset_ids(asset_ids):",
|
||||||
|
" return list(dict.fromkeys([asset_id for asset_id in asset_ids if isinstance(asset_id, str) and asset_id]))",
|
||||||
|
"",
|
||||||
|
"def count_video_files(source_path):",
|
||||||
|
" path = pathlib.Path(source_path)",
|
||||||
|
" if not path.exists():",
|
||||||
|
" return 0",
|
||||||
|
" if path.is_file():",
|
||||||
|
" return 1 if path.name.lower().endswith('.mp4') else 0",
|
||||||
|
" return sum(1 for child in path.rglob('*') if child.is_file() and child.name.lower().endswith('.mp4'))",
|
||||||
|
"",
|
||||||
|
"def build_source_result(context):",
|
||||||
|
" assets = context.get('assets', [])",
|
||||||
|
" asset_ids = [asset.get('id') for asset in assets if asset.get('id')]",
|
||||||
|
" print(f\"loaded {len(asset_ids)} bound asset{'s' if len(asset_ids) != 1 else ''}\")",
|
||||||
|
" return {",
|
||||||
|
" 'assetIds': asset_ids,",
|
||||||
|
" 'assetCount': len(asset_ids),",
|
||||||
|
" 'assets': assets,",
|
||||||
|
" }",
|
||||||
|
"",
|
||||||
|
"def build_validate_structure_result(context):",
|
||||||
|
" assets = context.get('assets', [])",
|
||||||
|
" summaries = []",
|
||||||
|
" for asset in assets:",
|
||||||
|
" top_level_paths = asset.get('topLevelPaths') or []",
|
||||||
|
" missing_required_files = [required for required in REQUIRED_DELIVERY_FILES if required not in top_level_paths]",
|
||||||
|
" video_file_count = count_video_files(asset.get('sourcePath')) if asset.get('sourcePath') else 0",
|
||||||
|
" summaries.append({",
|
||||||
|
" 'id': asset.get('id'),",
|
||||||
|
" 'displayName': asset.get('displayName'),",
|
||||||
|
" 'sourcePath': asset.get('sourcePath'),",
|
||||||
|
" 'detectedFormats': asset.get('detectedFormats') or [],",
|
||||||
|
" 'missingRequiredFiles': missing_required_files,",
|
||||||
|
" 'videoFileCount': video_file_count,",
|
||||||
|
" 'valid': len(missing_required_files) == 0 and video_file_count > 0,",
|
||||||
|
" })",
|
||||||
|
" asset_ids = [asset.get('id') for asset in assets if asset.get('id')]",
|
||||||
|
" missing_required = sorted({value for summary in summaries for value in summary['missingRequiredFiles']})",
|
||||||
|
" print(f\"validated {len(asset_ids)} asset{'s' if len(asset_ids) != 1 else ''}\")",
|
||||||
|
" result = {",
|
||||||
|
" 'assetCount': len(asset_ids),",
|
||||||
|
" 'requiredFiles': REQUIRED_DELIVERY_FILES,",
|
||||||
|
" 'videoFileCount': sum(summary['videoFileCount'] for summary in summaries),",
|
||||||
|
" 'valid': len(summaries) > 0 and all(summary['valid'] for summary in summaries),",
|
||||||
|
" }",
|
||||||
|
" if missing_required:",
|
||||||
|
" result['missingRequiredFiles'] = missing_required",
|
||||||
|
" return result",
|
||||||
|
"",
|
||||||
|
"def build_validate_metadata_result(context):",
|
||||||
|
" assets = context.get('assets', [])",
|
||||||
|
" summaries = []",
|
||||||
|
" for asset in assets:",
|
||||||
|
" top_level_paths = asset.get('topLevelPaths') or []",
|
||||||
|
" missing_required_files = [required for required in REQUIRED_DELIVERY_FILES if required not in top_level_paths]",
|
||||||
|
" summaries.append({",
|
||||||
|
" 'id': asset.get('id'),",
|
||||||
|
" 'displayName': asset.get('displayName'),",
|
||||||
|
" 'missingRequiredFiles': missing_required_files,",
|
||||||
|
" 'valid': len(missing_required_files) == 0,",
|
||||||
|
" })",
|
||||||
|
" asset_ids = [asset.get('id') for asset in assets if asset.get('id')]",
|
||||||
|
" missing_required = sorted({value for summary in summaries for value in summary['missingRequiredFiles']})",
|
||||||
|
" print(f\"validated metadata for {len(asset_ids)} asset{'s' if len(asset_ids) != 1 else ''}\")",
|
||||||
|
" result = {",
|
||||||
|
" 'assetIds': asset_ids,",
|
||||||
|
" 'assetCount': len(asset_ids),",
|
||||||
|
" 'requiredFiles': REQUIRED_DELIVERY_FILES,",
|
||||||
|
" 'valid': len(summaries) > 0 and all(summary['valid'] for summary in summaries),",
|
||||||
|
" 'assets': summaries,",
|
||||||
|
" }",
|
||||||
|
" if missing_required:",
|
||||||
|
" result['missingRequiredFiles'] = missing_required",
|
||||||
|
" return result",
|
||||||
|
"",
|
||||||
|
"def build_pass_through_result(context, operation):",
|
||||||
|
" asset_ids = dedupe_asset_ids(context.get('assetIds') or [])",
|
||||||
|
" print(f\"{operation} processed {len(asset_ids)} asset{'s' if len(asset_ids) != 1 else ''}\")",
|
||||||
|
" return {",
|
||||||
|
" 'operation': operation,",
|
||||||
|
" 'assetIds': asset_ids,",
|
||||||
|
" 'assetCount': len(asset_ids),",
|
||||||
|
" }",
|
||||||
|
"",
|
||||||
|
"def build_asset_set_result(context, operation):",
|
||||||
|
" upstream_results = context.get('upstreamResults') or []",
|
||||||
|
" upstream_sets = [dedupe_asset_ids(result.get('assetIds') or []) for result in upstream_results]",
|
||||||
|
" if operation == 'union':",
|
||||||
|
" asset_ids = dedupe_asset_ids([asset_id for asset_set in upstream_sets for asset_id in asset_set])",
|
||||||
|
" elif operation == 'intersect':",
|
||||||
|
" asset_ids = [] if not upstream_sets else list(upstream_sets[0])",
|
||||||
|
" for asset_set in upstream_sets[1:]:",
|
||||||
|
" asset_ids = [asset_id for asset_id in asset_ids if asset_id in asset_set]",
|
||||||
|
" else:",
|
||||||
|
" head = list(upstream_sets[0]) if upstream_sets else []",
|
||||||
|
" subtract = {asset_id for asset_set in upstream_sets[1:] for asset_id in asset_set}",
|
||||||
|
" asset_ids = [asset_id for asset_id in head if asset_id not in subtract]",
|
||||||
|
" assets_by_id = {asset.get('id'): asset for asset in context.get('assets', []) if asset.get('id')}",
|
||||||
|
" operation_label = 'intersection' if operation == 'intersect' else operation",
|
||||||
|
" print(f\"{operation_label} resolved {len(asset_ids)} asset{'s' if len(asset_ids) != 1 else ''}\")",
|
||||||
|
" return {",
|
||||||
|
" 'operation': operation,",
|
||||||
|
" 'upstreamCount': len(upstream_results),",
|
||||||
|
" 'assetIds': asset_ids,",
|
||||||
|
" 'assetCount': len(asset_ids),",
|
||||||
|
" 'assets': [assets_by_id[asset_id] for asset_id in asset_ids if asset_id in assets_by_id],",
|
||||||
|
" }",
|
||||||
|
"",
|
||||||
|
"def execute_hook(task, context):",
|
||||||
|
" hook = task.get('codeHookSpec') or {}",
|
||||||
|
" namespace = {}",
|
||||||
|
" stdout_buffer = io.StringIO()",
|
||||||
|
" entrypoint = hook.get('entrypoint') or 'process'",
|
||||||
|
" with contextlib.redirect_stdout(stdout_buffer):",
|
||||||
|
" exec(hook.get('source') or '', namespace)",
|
||||||
|
" candidate = namespace.get(entrypoint)",
|
||||||
|
" if not callable(candidate):",
|
||||||
|
" raise RuntimeError(f'Python hook entrypoint not found: {entrypoint}')",
|
||||||
|
" result = candidate(task, context)",
|
||||||
|
" stdout = stdout_buffer.getvalue()",
|
||||||
|
" if stdout:",
|
||||||
|
" sys.stdout.write(stdout)",
|
||||||
|
" return result",
|
||||||
|
"",
|
||||||
|
"def main():",
|
||||||
|
" payload = json.loads(pathlib.Path(sys.argv[1]).read_text())",
|
||||||
|
" output_path = pathlib.Path(sys.argv[2])",
|
||||||
|
" task = payload.get('task') or {}",
|
||||||
|
" context = payload.get('context') or {}",
|
||||||
|
" hook = task.get('codeHookSpec') or {}",
|
||||||
|
" definition_id = task.get('nodeDefinitionId') or task.get('nodeId')",
|
||||||
|
" if hook.get('source'):",
|
||||||
|
" result = execute_hook(task, context)",
|
||||||
|
" elif definition_id == 'source-asset':",
|
||||||
|
" result = build_source_result(context)",
|
||||||
|
" elif definition_id == 'validate-structure':",
|
||||||
|
" result = build_validate_structure_result(context)",
|
||||||
|
" elif definition_id == 'validate-metadata':",
|
||||||
|
" result = build_validate_metadata_result(context)",
|
||||||
|
" elif definition_id == 'union-assets':",
|
||||||
|
" result = build_asset_set_result(context, 'union')",
|
||||||
|
" elif definition_id == 'intersect-assets':",
|
||||||
|
" result = build_asset_set_result(context, 'intersect')",
|
||||||
|
" elif definition_id == 'difference-assets':",
|
||||||
|
" result = build_asset_set_result(context, 'difference')",
|
||||||
|
" elif definition_id in {'extract-archive', 'rename-folder', 'export-delivery-package'}:",
|
||||||
|
" result = build_pass_through_result(context, definition_id)",
|
||||||
|
" else:",
|
||||||
|
" print(f\"docker executor processed {task.get('nodeId')} with {task.get('executorConfig', {}).get('image', 'docker://local-simulated')}\")",
|
||||||
|
" result = {",
|
||||||
|
" 'taskId': task.get('id'),",
|
||||||
|
" 'executor': 'docker',",
|
||||||
|
" 'image': task.get('executorConfig', {}).get('image'),",
|
||||||
|
" }",
|
||||||
|
" output_path.write_text(json.dumps({'result': result}))",
|
||||||
|
"",
|
||||||
|
"if __name__ == '__main__':",
|
||||||
|
" main()",
|
||||||
|
].join("\n");
|
||||||
|
}
|
||||||
|
|
||||||
export class DockerExecutor {
|
export class DockerExecutor {
|
||||||
executionCount = 0;
|
executionCount = 0;
|
||||||
|
|
||||||
async execute(task: TaskRecord, context: ExecutionContext): Promise<ExecutorExecutionResult> {
|
async execute(task: TaskRecord, context: ExecutionContext): Promise<ExecutorExecutionResult> {
|
||||||
this.executionCount += 1;
|
this.executionCount += 1;
|
||||||
|
|
||||||
|
const dockerfileContent =
|
||||||
|
typeof task.executorConfig?.dockerfileContent === "string" ? task.executorConfig.dockerfileContent.trim() : "";
|
||||||
|
const requestedImageTag =
|
||||||
|
typeof task.executorConfig?.imageTag === "string" ? task.executorConfig.imageTag.trim() : "";
|
||||||
const image = typeof task.executorConfig?.image === "string" ? task.executorConfig.image.trim() : "";
|
const image = typeof task.executorConfig?.image === "string" ? task.executorConfig.image.trim() : "";
|
||||||
const command = Array.isArray(task.executorConfig?.command)
|
const command = Array.isArray(task.executorConfig?.command)
|
||||||
? task.executorConfig.command.filter((item): item is string => typeof item === "string")
|
? task.executorConfig.command.filter((item): item is string => typeof item === "string")
|
||||||
: [];
|
: [];
|
||||||
|
|
||||||
if (!image) {
|
const resolvedImage = image || (dockerfileContent ? await ensureImageFromDockerfile(dockerfileContent, requestedImageTag) : "");
|
||||||
|
|
||||||
|
if (!resolvedImage) {
|
||||||
return buildFallbackResult(task, "docker://local-simulated", command);
|
return buildFallbackResult(task, "docker://local-simulated", command);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -77,13 +349,22 @@ export class DockerExecutor {
|
|||||||
const tempDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-docker-executor-"));
|
const tempDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-docker-executor-"));
|
||||||
const inputPath = path.join(tempDir, "input.json");
|
const inputPath = path.join(tempDir, "input.json");
|
||||||
const outputPath = path.join(tempDir, "output.json");
|
const outputPath = path.join(tempDir, "output.json");
|
||||||
|
const runnerPath = path.join(tempDir, "runner.py");
|
||||||
|
const { assets: containerAssets, volumeArgs } = buildContainerAssetContext(workdir, context.assets ?? []);
|
||||||
|
const isCustomContainerNode = Boolean(task.executorConfig?.contract) ||
|
||||||
|
String(task.nodeDefinitionId ?? "").startsWith("custom-");
|
||||||
|
|
||||||
await writeFile(
|
await writeFile(
|
||||||
inputPath,
|
inputPath,
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
task,
|
task,
|
||||||
context,
|
context: {
|
||||||
|
...context,
|
||||||
|
assets: containerAssets,
|
||||||
|
},
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
|
await writeFile(runnerPath, createDockerRunner());
|
||||||
|
|
||||||
const dockerArgs = [
|
const dockerArgs = [
|
||||||
"run",
|
"run",
|
||||||
@ -94,6 +375,7 @@ export class DockerExecutor {
|
|||||||
workdir,
|
workdir,
|
||||||
"--volume",
|
"--volume",
|
||||||
`${tempDir}:${workdir}`,
|
`${tempDir}:${workdir}`,
|
||||||
|
...volumeArgs,
|
||||||
"--env",
|
"--env",
|
||||||
`EMBOFLOW_INPUT_PATH=${workdir}/input.json`,
|
`EMBOFLOW_INPUT_PATH=${workdir}/input.json`,
|
||||||
"--env",
|
"--env",
|
||||||
@ -105,51 +387,38 @@ export class DockerExecutor {
|
|||||||
"--env",
|
"--env",
|
||||||
`EMBOFLOW_WORKFLOW_RUN_ID=${context.workflowRunId ?? ""}`,
|
`EMBOFLOW_WORKFLOW_RUN_ID=${context.workflowRunId ?? ""}`,
|
||||||
...envVars.flatMap(([key, value]) => ["--env", `${key}=${value}`]),
|
...envVars.flatMap(([key, value]) => ["--env", `${key}=${value}`]),
|
||||||
image,
|
resolvedImage,
|
||||||
...(command.length > 0 ? command : ["sh", "-lc", "cat \"$EMBOFLOW_INPUT_PATH\" > \"$EMBOFLOW_OUTPUT_PATH\""]),
|
...(command.length > 0
|
||||||
|
? command
|
||||||
|
: isCustomContainerNode
|
||||||
|
? []
|
||||||
|
: ["python3", `${workdir}/runner.py`, `${workdir}/input.json`, `${workdir}/output.json`]),
|
||||||
];
|
];
|
||||||
|
const { exitCode, stdoutLines, stderrLines } = await runProcess("docker", dockerArgs);
|
||||||
const child = spawn("docker", dockerArgs, {
|
|
||||||
stdio: ["ignore", "pipe", "pipe"],
|
|
||||||
});
|
|
||||||
|
|
||||||
let stdout = "";
|
|
||||||
let stderr = "";
|
|
||||||
child.stdout.on("data", (chunk) => {
|
|
||||||
stdout += String(chunk);
|
|
||||||
});
|
|
||||||
child.stderr.on("data", (chunk) => {
|
|
||||||
stderr += String(chunk);
|
|
||||||
});
|
|
||||||
|
|
||||||
const exitCode = await new Promise<number>((resolve, reject) => {
|
|
||||||
child.on("error", reject);
|
|
||||||
child.on("close", (code) => resolve(code ?? 1));
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (exitCode !== 0) {
|
if (exitCode !== 0) {
|
||||||
throw Object.assign(new Error(`docker executor failed with exit code ${exitCode}`), {
|
throw Object.assign(new Error(`docker executor failed with exit code ${exitCode}`), {
|
||||||
stdoutLines: splitOutputLines(stdout),
|
stdoutLines,
|
||||||
stderrLines: splitOutputLines(stderr),
|
stderrLines,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
let result: unknown = {
|
let result: unknown = {
|
||||||
taskId: task.id,
|
taskId: task.id,
|
||||||
executor: "docker" as const,
|
executor: "docker" as const,
|
||||||
image,
|
image: resolvedImage,
|
||||||
command,
|
command,
|
||||||
};
|
};
|
||||||
if (await fileExists(outputPath)) {
|
if (await fileExists(outputPath)) {
|
||||||
const outputPayload = JSON.parse(await readFile(outputPath, "utf8")) as unknown;
|
const outputPayload = JSON.parse(await readFile(outputPath, "utf8")) as unknown;
|
||||||
result = parseDockerResult(outputPayload);
|
result = validateCustomNodeResult(task, parseDockerResult(outputPayload));
|
||||||
}
|
}
|
||||||
|
|
||||||
return {
|
return {
|
||||||
result,
|
result,
|
||||||
stdoutLines: splitOutputLines(stdout),
|
stdoutLines,
|
||||||
stderrLines: splitOutputLines(stderr),
|
stderrLines,
|
||||||
};
|
};
|
||||||
} finally {
|
} finally {
|
||||||
await rm(tempDir, { recursive: true, force: true });
|
await rm(tempDir, { recursive: true, force: true });
|
||||||
|
|||||||
@ -8,6 +8,7 @@ import type {
|
|||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
ExecutorExecutionResult,
|
ExecutorExecutionResult,
|
||||||
TaskRecord,
|
TaskRecord,
|
||||||
|
UpstreamExecutionResult,
|
||||||
} from "../contracts/execution-context.ts";
|
} from "../contracts/execution-context.ts";
|
||||||
|
|
||||||
function splitOutputLines(output: string) {
|
function splitOutputLines(output: string) {
|
||||||
@ -53,10 +54,67 @@ function getEffectiveNodeDefinitionId(task: TaskRecord) {
|
|||||||
return task.nodeDefinitionId ?? task.nodeId;
|
return task.nodeDefinitionId ?? task.nodeId;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function dedupeAssetIds(assetIds: string[] = []) {
|
||||||
|
return Array.from(new Set(assetIds.filter((assetId) => typeof assetId === "string" && assetId.length > 0)));
|
||||||
|
}
|
||||||
|
|
||||||
|
function getResultAssetIds(result: UpstreamExecutionResult) {
|
||||||
|
return dedupeAssetIds(result.assetIds ?? []);
|
||||||
|
}
|
||||||
|
|
||||||
|
function createAssetSetResult(
|
||||||
|
operation: "union" | "intersect" | "difference",
|
||||||
|
context: ExecutionContext,
|
||||||
|
): ExecutorExecutionResult {
|
||||||
|
const upstreamResults = context.upstreamResults ?? [];
|
||||||
|
const upstreamSets = upstreamResults.map((result) => getResultAssetIds(result));
|
||||||
|
|
||||||
|
let assetIds: string[] = [];
|
||||||
|
if (operation === "union") {
|
||||||
|
assetIds = dedupeAssetIds(upstreamSets.flat());
|
||||||
|
} else if (operation === "intersect") {
|
||||||
|
assetIds = upstreamSets.length === 0
|
||||||
|
? []
|
||||||
|
: upstreamSets.reduce<string[]>(
|
||||||
|
(current, next) => current.filter((assetId) => next.includes(assetId)),
|
||||||
|
[...upstreamSets[0]!],
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
const [head = [], ...rest] = upstreamSets;
|
||||||
|
const subtract = new Set(rest.flat());
|
||||||
|
assetIds = head.filter((assetId) => !subtract.has(assetId));
|
||||||
|
}
|
||||||
|
|
||||||
|
const assetsById = new Map((context.assets ?? []).map((asset) => [asset.id, asset]));
|
||||||
|
const assets = assetIds
|
||||||
|
.map((assetId) => assetsById.get(assetId))
|
||||||
|
.filter((asset): asset is ExecutionAsset => Boolean(asset))
|
||||||
|
.map((asset) => ({
|
||||||
|
id: asset.id,
|
||||||
|
displayName: asset.displayName,
|
||||||
|
sourcePath: asset.sourcePath,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const operationLabel = operation === "intersect" ? "intersection" : operation;
|
||||||
|
|
||||||
|
return {
|
||||||
|
result: {
|
||||||
|
operation,
|
||||||
|
upstreamCount: upstreamResults.length,
|
||||||
|
assetIds,
|
||||||
|
assetCount: assetIds.length,
|
||||||
|
assets,
|
||||||
|
},
|
||||||
|
stdoutLines: [`${operationLabel} resolved ${assetIds.length} asset${assetIds.length === 1 ? "" : "s"}`],
|
||||||
|
stderrLines: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
function createBuiltinSourceResult(context: ExecutionContext): ExecutorExecutionResult {
|
function createBuiltinSourceResult(context: ExecutionContext): ExecutorExecutionResult {
|
||||||
const assets = context.assets ?? [];
|
const assets = context.assets ?? [];
|
||||||
return {
|
return {
|
||||||
result: {
|
result: {
|
||||||
|
assetIds: assets.map((asset) => asset.id),
|
||||||
assetCount: assets.length,
|
assetCount: assets.length,
|
||||||
assets: assets.map((asset) => ({
|
assets: assets.map((asset) => ({
|
||||||
id: asset.id,
|
id: asset.id,
|
||||||
@ -136,6 +194,52 @@ async function createBuiltinValidateResult(context: ExecutionContext): Promise<E
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function createBuiltinMetadataResult(context: ExecutionContext): ExecutorExecutionResult {
|
||||||
|
const assets = context.assets ?? [];
|
||||||
|
const assetSummaries = assets.map((asset) => {
|
||||||
|
const topLevelPaths = asset.topLevelPaths ?? [];
|
||||||
|
const missingRequiredFiles = REQUIRED_DELIVERY_FILES.filter((required) => !topLevelPaths.includes(required));
|
||||||
|
return {
|
||||||
|
id: asset.id,
|
||||||
|
displayName: asset.displayName,
|
||||||
|
missingRequiredFiles,
|
||||||
|
valid: missingRequiredFiles.length === 0,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
const missingRequiredFiles = Array.from(new Set(assetSummaries.flatMap((asset) => asset.missingRequiredFiles)));
|
||||||
|
const valid = assetSummaries.length > 0 && assetSummaries.every((asset) => asset.valid);
|
||||||
|
|
||||||
|
return {
|
||||||
|
result: {
|
||||||
|
assetIds: assets.map((asset) => asset.id),
|
||||||
|
assetCount: assets.length,
|
||||||
|
valid,
|
||||||
|
requiredFiles: [...REQUIRED_DELIVERY_FILES],
|
||||||
|
assets: assetSummaries,
|
||||||
|
...(missingRequiredFiles.length > 0 ? { missingRequiredFiles } : {}),
|
||||||
|
},
|
||||||
|
stdoutLines: [`validated metadata for ${assets.length} asset${assets.length === 1 ? "" : "s"}`],
|
||||||
|
stderrLines: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function createBuiltinPassThroughResult(
|
||||||
|
_task: TaskRecord,
|
||||||
|
context: ExecutionContext,
|
||||||
|
operation: string,
|
||||||
|
): ExecutorExecutionResult {
|
||||||
|
const assetIds = dedupeAssetIds(context.assetIds ?? []);
|
||||||
|
return {
|
||||||
|
result: {
|
||||||
|
operation,
|
||||||
|
assetIds,
|
||||||
|
assetCount: assetIds.length,
|
||||||
|
},
|
||||||
|
stdoutLines: [`${operation} processed ${assetIds.length} asset${assetIds.length === 1 ? "" : "s"}`],
|
||||||
|
stderrLines: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
export class PythonExecutor {
|
export class PythonExecutor {
|
||||||
executionCount = 0;
|
executionCount = 0;
|
||||||
|
|
||||||
@ -200,8 +304,20 @@ export class PythonExecutor {
|
|||||||
switch (getEffectiveNodeDefinitionId(task)) {
|
switch (getEffectiveNodeDefinitionId(task)) {
|
||||||
case "source-asset":
|
case "source-asset":
|
||||||
return createBuiltinSourceResult(context);
|
return createBuiltinSourceResult(context);
|
||||||
|
case "extract-archive":
|
||||||
|
return createBuiltinPassThroughResult(task, context, "extract-archive");
|
||||||
|
case "rename-folder":
|
||||||
|
return createBuiltinPassThroughResult(task, context, "rename-folder");
|
||||||
case "validate-structure":
|
case "validate-structure":
|
||||||
return createBuiltinValidateResult(context);
|
return createBuiltinValidateResult(context);
|
||||||
|
case "validate-metadata":
|
||||||
|
return createBuiltinMetadataResult(context);
|
||||||
|
case "union-assets":
|
||||||
|
return createAssetSetResult("union", context);
|
||||||
|
case "intersect-assets":
|
||||||
|
return createAssetSetResult("intersect", context);
|
||||||
|
case "difference-assets":
|
||||||
|
return createAssetSetResult("difference", context);
|
||||||
default:
|
default:
|
||||||
return createDefaultResult(task);
|
return createDefaultResult(task);
|
||||||
}
|
}
|
||||||
|
|||||||
@ -5,6 +5,7 @@ import type { Db } from "mongodb";
|
|||||||
import type {
|
import type {
|
||||||
CodeHookSpec,
|
CodeHookSpec,
|
||||||
ExecutionAsset,
|
ExecutionAsset,
|
||||||
|
ExecutionDataset,
|
||||||
ExecutorType,
|
ExecutorType,
|
||||||
NodeRuntimeConfig,
|
NodeRuntimeConfig,
|
||||||
RunExecutionSummary,
|
RunExecutionSummary,
|
||||||
@ -12,6 +13,7 @@ import type {
|
|||||||
TaskRecord,
|
TaskRecord,
|
||||||
TaskStatusCounts,
|
TaskStatusCounts,
|
||||||
TaskStatus,
|
TaskStatus,
|
||||||
|
WorkflowInputBinding,
|
||||||
} from "../contracts/execution-context.ts";
|
} from "../contracts/execution-context.ts";
|
||||||
|
|
||||||
type WorkflowRunDocument = {
|
type WorkflowRunDocument = {
|
||||||
@ -20,7 +22,9 @@ type WorkflowRunDocument = {
|
|||||||
workflowVersionId: string;
|
workflowVersionId: string;
|
||||||
status: "queued" | "running" | "success" | "failed" | "cancelled";
|
status: "queued" | "running" | "success" | "failed" | "cancelled";
|
||||||
triggeredBy: string;
|
triggeredBy: string;
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
assetIds: string[];
|
assetIds: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
runtimeSnapshot?: {
|
runtimeSnapshot?: {
|
||||||
selectedPreset?: string;
|
selectedPreset?: string;
|
||||||
nodeBindings?: Record<string, string>;
|
nodeBindings?: Record<string, string>;
|
||||||
@ -59,7 +63,9 @@ type RunTaskDocument = {
|
|||||||
artifactTitle?: string;
|
artifactTitle?: string;
|
||||||
status: TaskStatus;
|
status: TaskStatus;
|
||||||
attempt: number;
|
attempt: number;
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
assetIds: string[];
|
assetIds: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
upstreamNodeIds: string[];
|
upstreamNodeIds: string[];
|
||||||
outputArtifactIds: string[];
|
outputArtifactIds: string[];
|
||||||
errorMessage?: string;
|
errorMessage?: string;
|
||||||
@ -84,6 +90,16 @@ type AssetDocument = {
|
|||||||
summary?: Record<string, unknown>;
|
summary?: Record<string, unknown>;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type DatasetDocument = {
|
||||||
|
_id: string;
|
||||||
|
name: string;
|
||||||
|
storagePath?: string;
|
||||||
|
sourceAssetIds?: string[];
|
||||||
|
latestVersionId?: string;
|
||||||
|
latestVersionNumber?: number;
|
||||||
|
summary?: Record<string, unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
function nowIso() {
|
function nowIso() {
|
||||||
return new Date().toISOString();
|
return new Date().toISOString();
|
||||||
}
|
}
|
||||||
@ -103,7 +119,9 @@ function toTaskRecord(task: RunTaskDocument): TaskRecord {
|
|||||||
artifactTitle: task.artifactTitle,
|
artifactTitle: task.artifactTitle,
|
||||||
status: task.status,
|
status: task.status,
|
||||||
attempt: task.attempt,
|
attempt: task.attempt,
|
||||||
|
inputBindings: task.inputBindings ?? [],
|
||||||
assetIds: task.assetIds,
|
assetIds: task.assetIds,
|
||||||
|
datasetIds: task.datasetIds ?? [],
|
||||||
upstreamNodeIds: task.upstreamNodeIds,
|
upstreamNodeIds: task.upstreamNodeIds,
|
||||||
outputArtifactIds: task.outputArtifactIds,
|
outputArtifactIds: task.outputArtifactIds,
|
||||||
errorMessage: task.errorMessage,
|
errorMessage: task.errorMessage,
|
||||||
@ -294,6 +312,35 @@ export class MongoWorkerStore {
|
|||||||
.filter((asset): asset is ExecutionAsset => Boolean(asset));
|
.filter((asset): asset is ExecutionAsset => Boolean(asset));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async getDatasetsByIds(datasetIds: string[]): Promise<ExecutionDataset[]> {
|
||||||
|
if (datasetIds.length === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const datasets = await this.db
|
||||||
|
.collection<DatasetDocument>("datasets")
|
||||||
|
.find({ _id: { $in: datasetIds } })
|
||||||
|
.toArray();
|
||||||
|
const datasetMap = new Map(
|
||||||
|
datasets.map((dataset) => [
|
||||||
|
dataset._id,
|
||||||
|
{
|
||||||
|
id: dataset._id,
|
||||||
|
name: dataset.name,
|
||||||
|
storagePath: dataset.storagePath,
|
||||||
|
sourceAssetIds: dataset.sourceAssetIds ?? [],
|
||||||
|
latestVersionId: dataset.latestVersionId,
|
||||||
|
latestVersionNumber: dataset.latestVersionNumber,
|
||||||
|
summary: dataset.summary ?? {},
|
||||||
|
} satisfies ExecutionDataset,
|
||||||
|
]),
|
||||||
|
);
|
||||||
|
|
||||||
|
return datasetIds
|
||||||
|
.map((datasetId) => datasetMap.get(datasetId))
|
||||||
|
.filter((dataset): dataset is ExecutionDataset => Boolean(dataset));
|
||||||
|
}
|
||||||
|
|
||||||
async createTaskArtifact(task: TaskRecord, payload: Record<string, unknown>) {
|
async createTaskArtifact(task: TaskRecord, payload: Record<string, unknown>) {
|
||||||
const artifact = {
|
const artifact = {
|
||||||
_id: `artifact-${randomUUID()}`,
|
_id: `artifact-${randomUUID()}`,
|
||||||
@ -319,6 +366,7 @@ export class MongoWorkerStore {
|
|||||||
async markTaskSuccess(
|
async markTaskSuccess(
|
||||||
taskId: string,
|
taskId: string,
|
||||||
input: {
|
input: {
|
||||||
|
assetIds: string[];
|
||||||
finishedAt: string;
|
finishedAt: string;
|
||||||
durationMs: number;
|
durationMs: number;
|
||||||
summary: TaskExecutionSummary;
|
summary: TaskExecutionSummary;
|
||||||
@ -332,6 +380,7 @@ export class MongoWorkerStore {
|
|||||||
{ _id: taskId },
|
{ _id: taskId },
|
||||||
{
|
{
|
||||||
$set: {
|
$set: {
|
||||||
|
assetIds: input.assetIds,
|
||||||
status: "success",
|
status: "success",
|
||||||
finishedAt: input.finishedAt,
|
finishedAt: input.finishedAt,
|
||||||
durationMs: input.durationMs,
|
durationMs: input.durationMs,
|
||||||
@ -354,6 +403,7 @@ export class MongoWorkerStore {
|
|||||||
taskId: string,
|
taskId: string,
|
||||||
errorMessage: string,
|
errorMessage: string,
|
||||||
input: {
|
input: {
|
||||||
|
assetIds: string[];
|
||||||
finishedAt: string;
|
finishedAt: string;
|
||||||
durationMs: number;
|
durationMs: number;
|
||||||
summary: TaskExecutionSummary;
|
summary: TaskExecutionSummary;
|
||||||
@ -366,6 +416,7 @@ export class MongoWorkerStore {
|
|||||||
{ _id: taskId },
|
{ _id: taskId },
|
||||||
{
|
{
|
||||||
$set: {
|
$set: {
|
||||||
|
assetIds: input.assetIds,
|
||||||
status: "failed",
|
status: "failed",
|
||||||
errorMessage,
|
errorMessage,
|
||||||
finishedAt: input.finishedAt,
|
finishedAt: input.finishedAt,
|
||||||
|
|||||||
@ -3,6 +3,7 @@ import { HttpExecutor } from "../executors/http-executor.ts";
|
|||||||
import { PythonExecutor } from "../executors/python-executor.ts";
|
import { PythonExecutor } from "../executors/python-executor.ts";
|
||||||
import type {
|
import type {
|
||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
|
UpstreamExecutionResult,
|
||||||
ExecutorExecutionResult,
|
ExecutorExecutionResult,
|
||||||
ExecutorType,
|
ExecutorType,
|
||||||
TaskExecutionSummary,
|
TaskExecutionSummary,
|
||||||
@ -38,7 +39,7 @@ export class WorkerRuntime {
|
|||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
const startedAt = task.startedAt ?? new Date().toISOString();
|
const startedAt = task.startedAt ?? new Date().toISOString();
|
||||||
const assets = await this.store.getAssetsByIds(task.assetIds ?? []);
|
const executionInput = await this.resolveExecutionInput(task);
|
||||||
|
|
||||||
const context: ExecutionContext = {
|
const context: ExecutionContext = {
|
||||||
taskId: task.id,
|
taskId: task.id,
|
||||||
@ -46,8 +47,12 @@ export class WorkerRuntime {
|
|||||||
workflowVersionId: task.workflowVersionId,
|
workflowVersionId: task.workflowVersionId,
|
||||||
nodeId: task.nodeId,
|
nodeId: task.nodeId,
|
||||||
nodeDefinitionId: task.nodeDefinitionId,
|
nodeDefinitionId: task.nodeDefinitionId,
|
||||||
assetIds: task.assetIds,
|
inputBindings: task.inputBindings,
|
||||||
assets,
|
assetIds: executionInput.assetIds,
|
||||||
|
assets: executionInput.assets,
|
||||||
|
datasetIds: executionInput.datasetIds,
|
||||||
|
datasets: executionInput.datasets,
|
||||||
|
upstreamResults: executionInput.upstreamResults,
|
||||||
};
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@ -59,19 +64,22 @@ export class WorkerRuntime {
|
|||||||
nodeType: task.nodeType,
|
nodeType: task.nodeType,
|
||||||
nodeDefinitionId: task.nodeDefinitionId,
|
nodeDefinitionId: task.nodeDefinitionId,
|
||||||
executorType: task.executorType,
|
executorType: task.executorType,
|
||||||
assetIds: task.assetIds,
|
inputBindings: task.inputBindings,
|
||||||
|
assetIds: executionInput.assetIds,
|
||||||
|
datasetIds: executionInput.datasetIds,
|
||||||
result: execution.result,
|
result: execution.result,
|
||||||
});
|
});
|
||||||
const finishedAt = new Date().toISOString();
|
const finishedAt = new Date().toISOString();
|
||||||
const summary: TaskExecutionSummary = {
|
const summary: TaskExecutionSummary = {
|
||||||
outcome: "success",
|
outcome: "success",
|
||||||
executorType: task.executorType,
|
executorType: task.executorType,
|
||||||
assetCount: task.assetIds?.length ?? 0,
|
assetCount: executionInput.assetIds.length,
|
||||||
artifactIds: [artifact._id],
|
artifactIds: [artifact._id],
|
||||||
stdoutLineCount: execution.stdoutLines.length,
|
stdoutLineCount: execution.stdoutLines.length,
|
||||||
stderrLineCount: execution.stderrLines.length,
|
stderrLineCount: execution.stderrLines.length,
|
||||||
};
|
};
|
||||||
await this.store.markTaskSuccess(task.id, {
|
await this.store.markTaskSuccess(task.id, {
|
||||||
|
assetIds: executionInput.assetIds,
|
||||||
finishedAt,
|
finishedAt,
|
||||||
durationMs: this.computeDurationMs(startedAt, finishedAt),
|
durationMs: this.computeDurationMs(startedAt, finishedAt),
|
||||||
summary,
|
summary,
|
||||||
@ -95,13 +103,14 @@ export class WorkerRuntime {
|
|||||||
const summary: TaskExecutionSummary = {
|
const summary: TaskExecutionSummary = {
|
||||||
outcome: "failed",
|
outcome: "failed",
|
||||||
executorType: task.executorType,
|
executorType: task.executorType,
|
||||||
assetCount: task.assetIds?.length ?? 0,
|
assetCount: executionInput.assetIds.length,
|
||||||
artifactIds: [],
|
artifactIds: [],
|
||||||
stdoutLineCount: executionError.stdoutLines.length,
|
stdoutLineCount: executionError.stdoutLines.length,
|
||||||
stderrLineCount: executionError.stderrLines.length,
|
stderrLineCount: executionError.stderrLines.length,
|
||||||
errorMessage: executionError.message,
|
errorMessage: executionError.message,
|
||||||
};
|
};
|
||||||
await this.store.markTaskFailed(task.id, executionError.message, {
|
await this.store.markTaskFailed(task.id, executionError.message, {
|
||||||
|
assetIds: executionInput.assetIds,
|
||||||
finishedAt,
|
finishedAt,
|
||||||
durationMs: this.computeDurationMs(startedAt, finishedAt),
|
durationMs: this.computeDurationMs(startedAt, finishedAt),
|
||||||
summary,
|
summary,
|
||||||
@ -120,6 +129,56 @@ export class WorkerRuntime {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async resolveExecutionInput(task: TaskRecord) {
|
||||||
|
const upstreamResults = await this.collectUpstreamResults(task);
|
||||||
|
const assetIds = this.resolveEffectiveAssetIds(task, upstreamResults);
|
||||||
|
const assets = await this.store.getAssetsByIds(assetIds);
|
||||||
|
|
||||||
|
return {
|
||||||
|
assetIds,
|
||||||
|
assets,
|
||||||
|
datasetIds: task.datasetIds ?? [],
|
||||||
|
datasets: await this.store.getDatasetsByIds(task.datasetIds ?? []),
|
||||||
|
upstreamResults,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private async collectUpstreamResults(task: TaskRecord): Promise<UpstreamExecutionResult[]> {
|
||||||
|
if (!task.workflowRunId || (task.upstreamNodeIds?.length ?? 0) === 0) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
const runTasks = await this.store.listRunTasks(task.workflowRunId);
|
||||||
|
const directUpstreamNodeIds = new Set(task.upstreamNodeIds ?? []);
|
||||||
|
return runTasks
|
||||||
|
.filter((candidate) => candidate.status === "success" && directUpstreamNodeIds.has(candidate.nodeId))
|
||||||
|
.map((candidate) => ({
|
||||||
|
taskId: candidate.id,
|
||||||
|
nodeId: candidate.nodeId,
|
||||||
|
nodeDefinitionId: candidate.nodeDefinitionId,
|
||||||
|
assetIds: this.readAssetIdsFromResult(candidate.lastResultPreview) ?? candidate.assetIds ?? [],
|
||||||
|
result: candidate.lastResultPreview,
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
private resolveEffectiveAssetIds(task: TaskRecord, upstreamResults: UpstreamExecutionResult[]) {
|
||||||
|
if (upstreamResults.length === 1) {
|
||||||
|
return this.dedupeAssetIds(upstreamResults[0].assetIds);
|
||||||
|
}
|
||||||
|
return this.dedupeAssetIds(task.assetIds ?? []);
|
||||||
|
}
|
||||||
|
|
||||||
|
private readAssetIdsFromResult(result: Record<string, unknown> | undefined) {
|
||||||
|
if (!result || !Array.isArray(result.assetIds)) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return result.assetIds.filter((assetId): assetId is string => typeof assetId === "string");
|
||||||
|
}
|
||||||
|
|
||||||
|
private dedupeAssetIds(assetIds: string[]) {
|
||||||
|
return Array.from(new Set(assetIds.filter((assetId) => assetId.length > 0)));
|
||||||
|
}
|
||||||
|
|
||||||
private computeDurationMs(startedAt: string, finishedAt: string) {
|
private computeDurationMs(startedAt: string, finishedAt: string) {
|
||||||
const duration = Date.parse(finishedAt) - Date.parse(startedAt);
|
const duration = Date.parse(finishedAt) - Date.parse(startedAt);
|
||||||
return Number.isFinite(duration) && duration >= 0 ? duration : 0;
|
return Number.isFinite(duration) && duration >= 0 ? duration : 0;
|
||||||
|
|||||||
@ -584,6 +584,78 @@ test("worker executes a queued docker task inside a real container", {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("worker executes built-in docker source nodes when codeHookSpec is null", {
|
||||||
|
skip: !hasDockerRuntime(),
|
||||||
|
}, async (t) => {
|
||||||
|
ensureDockerImage("python:3.11-alpine");
|
||||||
|
const fixture = await createRuntimeFixture("emboflow-worker-built-in-docker-source");
|
||||||
|
t.after(async () => {
|
||||||
|
await fixture.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("assets").insertOne({
|
||||||
|
_id: "asset-built-in-docker-source",
|
||||||
|
workspaceId: "workspace-1",
|
||||||
|
projectId: "project-1",
|
||||||
|
type: "folder",
|
||||||
|
sourceType: "registered_path",
|
||||||
|
displayName: "Built-in Docker Source Asset",
|
||||||
|
sourcePath: "/tmp/built-in-docker-source",
|
||||||
|
status: "probed",
|
||||||
|
storageRef: {},
|
||||||
|
topLevelPaths: ["meta.json"],
|
||||||
|
detectedFormats: ["delivery_package"],
|
||||||
|
fileCount: 1,
|
||||||
|
summary: {},
|
||||||
|
createdBy: "local-user",
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("workflow_runs").insertOne({
|
||||||
|
_id: "run-built-in-docker-source",
|
||||||
|
workflowDefinitionId: "workflow-built-in-docker-source",
|
||||||
|
workflowVersionId: "workflow-built-in-docker-source-v1",
|
||||||
|
status: "queued",
|
||||||
|
triggeredBy: "local-user",
|
||||||
|
assetIds: ["asset-built-in-docker-source"],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("run_tasks").insertOne({
|
||||||
|
_id: "task-built-in-docker-source",
|
||||||
|
workflowRunId: "run-built-in-docker-source",
|
||||||
|
workflowVersionId: "workflow-built-in-docker-source-v1",
|
||||||
|
nodeId: "source-asset",
|
||||||
|
nodeDefinitionId: "source-asset",
|
||||||
|
nodeType: "source",
|
||||||
|
executorType: "docker",
|
||||||
|
executorConfig: {
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
networkMode: "none",
|
||||||
|
},
|
||||||
|
codeHookSpec: null,
|
||||||
|
status: "queued",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-built-in-docker-source"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.runtime.runNextTask();
|
||||||
|
|
||||||
|
const task = await fixture.store.getRunTask("task-built-in-docker-source");
|
||||||
|
const artifact = await fixture.db.collection("artifacts").findOne({ producerId: "task-built-in-docker-source" });
|
||||||
|
|
||||||
|
assert.equal(task?.status, "success");
|
||||||
|
assert.deepEqual(task?.stderrLines, []);
|
||||||
|
assert.deepEqual(task?.stdoutLines, ["loaded 1 bound asset"]);
|
||||||
|
assert.deepEqual((artifact?.payload as { result?: { assetCount?: number } } | undefined)?.result?.assetCount, 1);
|
||||||
|
});
|
||||||
|
|
||||||
test("worker loads bound asset metadata into the execution context for built-in source nodes", async (t) => {
|
test("worker loads bound asset metadata into the execution context for built-in source nodes", async (t) => {
|
||||||
let capturedContext: ExecutionContext | null = null;
|
let capturedContext: ExecutionContext | null = null;
|
||||||
const fixture = await createRuntimeFixture("emboflow-worker-source-context", {
|
const fixture = await createRuntimeFixture("emboflow-worker-source-context", {
|
||||||
@ -738,3 +810,315 @@ test("worker validates delivery structure against the bound asset path for valid
|
|||||||
videoFileCount: 1,
|
videoFileCount: 1,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("worker applies intersect-assets and narrows the downstream effective asset set", async (t) => {
|
||||||
|
const sourceDirA = await mkdtemp(path.join(os.tmpdir(), "emboflow-worker-intersect-a-"));
|
||||||
|
const sourceDirB = await mkdtemp(path.join(os.tmpdir(), "emboflow-worker-intersect-b-"));
|
||||||
|
await mkdir(path.join(sourceDirA, "DJI_A"));
|
||||||
|
await mkdir(path.join(sourceDirB, "DJI_B"));
|
||||||
|
for (const root of [sourceDirA, sourceDirB]) {
|
||||||
|
await writeFile(path.join(root, "meta.json"), "{}");
|
||||||
|
await writeFile(path.join(root, "intrinsics.json"), "{}");
|
||||||
|
await writeFile(path.join(root, "video_meta.json"), "{}");
|
||||||
|
}
|
||||||
|
await writeFile(path.join(sourceDirA, "DJI_A", "A.mp4"), "");
|
||||||
|
await writeFile(path.join(sourceDirB, "DJI_B", "B.mp4"), "");
|
||||||
|
|
||||||
|
const fixture = await createRuntimeFixture("emboflow-worker-intersect-assets");
|
||||||
|
t.after(async () => {
|
||||||
|
await fixture.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("assets").insertMany([
|
||||||
|
{
|
||||||
|
_id: "asset-intersect-a",
|
||||||
|
workspaceId: "workspace-1",
|
||||||
|
projectId: "project-1",
|
||||||
|
type: "folder",
|
||||||
|
sourceType: "registered_path",
|
||||||
|
displayName: "Intersect Asset A",
|
||||||
|
sourcePath: sourceDirA,
|
||||||
|
status: "probed",
|
||||||
|
storageRef: {},
|
||||||
|
topLevelPaths: ["DJI_A", "meta.json", "intrinsics.json", "video_meta.json"],
|
||||||
|
detectedFormats: ["delivery_package"],
|
||||||
|
fileCount: 4,
|
||||||
|
summary: { kind: "delivery_package" },
|
||||||
|
createdBy: "local-user",
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "asset-intersect-b",
|
||||||
|
workspaceId: "workspace-1",
|
||||||
|
projectId: "project-1",
|
||||||
|
type: "folder",
|
||||||
|
sourceType: "registered_path",
|
||||||
|
displayName: "Intersect Asset B",
|
||||||
|
sourcePath: sourceDirB,
|
||||||
|
status: "probed",
|
||||||
|
storageRef: {},
|
||||||
|
topLevelPaths: ["DJI_B", "meta.json", "intrinsics.json", "video_meta.json"],
|
||||||
|
detectedFormats: ["delivery_package"],
|
||||||
|
fileCount: 4,
|
||||||
|
summary: { kind: "delivery_package" },
|
||||||
|
createdBy: "local-user",
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
await fixture.db.collection("workflow_runs").insertOne({
|
||||||
|
_id: "run-intersect-assets",
|
||||||
|
workflowDefinitionId: "workflow-intersect-assets",
|
||||||
|
workflowVersionId: "workflow-intersect-assets-v1",
|
||||||
|
status: "queued",
|
||||||
|
triggeredBy: "local-user",
|
||||||
|
assetIds: ["asset-intersect-a", "asset-intersect-b"],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("run_tasks").insertMany([
|
||||||
|
{
|
||||||
|
_id: "task-upstream-a",
|
||||||
|
workflowRunId: "run-intersect-assets",
|
||||||
|
workflowVersionId: "workflow-intersect-assets-v1",
|
||||||
|
nodeId: "source-assets-a",
|
||||||
|
nodeDefinitionId: "source-asset",
|
||||||
|
nodeType: "source",
|
||||||
|
executorType: "python",
|
||||||
|
status: "success",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-intersect-a", "asset-intersect-b"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
lastResultPreview: { assetIds: ["asset-intersect-a", "asset-intersect-b"] },
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "task-upstream-b",
|
||||||
|
workflowRunId: "run-intersect-assets",
|
||||||
|
workflowVersionId: "workflow-intersect-assets-v1",
|
||||||
|
nodeId: "source-assets-b",
|
||||||
|
nodeDefinitionId: "source-asset",
|
||||||
|
nodeType: "source",
|
||||||
|
executorType: "python",
|
||||||
|
status: "success",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-intersect-b"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
lastResultPreview: { assetIds: ["asset-intersect-b"] },
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "task-intersect-assets",
|
||||||
|
workflowRunId: "run-intersect-assets",
|
||||||
|
workflowVersionId: "workflow-intersect-assets-v1",
|
||||||
|
nodeId: "intersect-assets-1",
|
||||||
|
nodeDefinitionId: "intersect-assets",
|
||||||
|
nodeType: "utility",
|
||||||
|
executorType: "python",
|
||||||
|
status: "queued",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-intersect-a", "asset-intersect-b"],
|
||||||
|
upstreamNodeIds: ["source-assets-a", "source-assets-b"],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "task-downstream-validate",
|
||||||
|
workflowRunId: "run-intersect-assets",
|
||||||
|
workflowVersionId: "workflow-intersect-assets-v1",
|
||||||
|
nodeId: "validate-structure",
|
||||||
|
nodeDefinitionId: "validate-structure",
|
||||||
|
nodeType: "inspect",
|
||||||
|
executorType: "python",
|
||||||
|
status: "pending",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-intersect-a", "asset-intersect-b"],
|
||||||
|
upstreamNodeIds: ["intersect-assets-1"],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
await fixture.runtime.runNextTask();
|
||||||
|
const intersectTask = await fixture.store.getRunTask("task-intersect-assets");
|
||||||
|
const queuedValidate = await fixture.store.getRunTask("task-downstream-validate");
|
||||||
|
|
||||||
|
assert.equal(intersectTask?.status, "success");
|
||||||
|
assert.deepEqual(intersectTask?.lastResultPreview?.assetIds, ["asset-intersect-b"]);
|
||||||
|
assert.match(intersectTask?.stdoutLines?.[0] ?? "", /intersection resolved 1 asset/i);
|
||||||
|
assert.equal(queuedValidate?.status, "queued");
|
||||||
|
|
||||||
|
await fixture.runtime.runNextTask();
|
||||||
|
const validateTask = await fixture.store.getRunTask("task-downstream-validate");
|
||||||
|
|
||||||
|
assert.equal(validateTask?.status, "success");
|
||||||
|
assert.equal(validateTask?.summary?.assetCount, 1);
|
||||||
|
assert.deepEqual(validateTask?.lastResultPreview, {
|
||||||
|
assetCount: 1,
|
||||||
|
valid: true,
|
||||||
|
requiredFiles: ["meta.json", "intrinsics.json", "video_meta.json"],
|
||||||
|
videoFileCount: 1,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test("worker executes built-in union-assets inside docker when docker is available", async (t) => {
|
||||||
|
if (!hasDockerRuntime()) {
|
||||||
|
t.diagnostic("docker runtime unavailable; skipping built-in docker union-assets test");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
ensureDockerImage("python:3.11-alpine");
|
||||||
|
|
||||||
|
const fixture = await createRuntimeFixture("emboflow-worker-docker-union-assets");
|
||||||
|
t.after(async () => {
|
||||||
|
await fixture.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("workflow_runs").insertOne({
|
||||||
|
_id: "run-docker-union-assets",
|
||||||
|
workflowDefinitionId: "workflow-docker-union-assets",
|
||||||
|
workflowVersionId: "workflow-docker-union-assets-v1",
|
||||||
|
status: "queued",
|
||||||
|
triggeredBy: "local-user",
|
||||||
|
assetIds: ["asset-union-a", "asset-union-b"],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("run_tasks").insertMany([
|
||||||
|
{
|
||||||
|
_id: "task-union-upstream-a",
|
||||||
|
workflowRunId: "run-docker-union-assets",
|
||||||
|
workflowVersionId: "workflow-docker-union-assets-v1",
|
||||||
|
nodeId: "source-assets-a",
|
||||||
|
nodeDefinitionId: "source-asset",
|
||||||
|
nodeType: "source",
|
||||||
|
executorType: "python",
|
||||||
|
status: "success",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-union-a"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
lastResultPreview: { assetIds: ["asset-union-a"] },
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "task-union-upstream-b",
|
||||||
|
workflowRunId: "run-docker-union-assets",
|
||||||
|
workflowVersionId: "workflow-docker-union-assets-v1",
|
||||||
|
nodeId: "source-assets-b",
|
||||||
|
nodeDefinitionId: "source-asset",
|
||||||
|
nodeType: "source",
|
||||||
|
executorType: "python",
|
||||||
|
status: "success",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-union-b"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
lastResultPreview: { assetIds: ["asset-union-b"] },
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
_id: "task-union-docker",
|
||||||
|
workflowRunId: "run-docker-union-assets",
|
||||||
|
workflowVersionId: "workflow-docker-union-assets-v1",
|
||||||
|
nodeId: "union-assets-1",
|
||||||
|
nodeDefinitionId: "union-assets",
|
||||||
|
nodeType: "utility",
|
||||||
|
executorType: "docker",
|
||||||
|
executorConfig: {
|
||||||
|
image: "python:3.11-alpine",
|
||||||
|
},
|
||||||
|
status: "queued",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-union-a", "asset-union-b"],
|
||||||
|
upstreamNodeIds: ["source-assets-a", "source-assets-b"],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
await fixture.runtime.runNextTask();
|
||||||
|
const task = await fixture.store.getRunTask("task-union-docker");
|
||||||
|
|
||||||
|
assert.equal(task?.status, "success");
|
||||||
|
assert.equal(task?.summary?.executorType, "docker");
|
||||||
|
assert.match(task?.stdoutLines?.[0] ?? "", /union resolved 2 assets/i);
|
||||||
|
assert.deepEqual(task?.lastResultPreview?.assetIds, ["asset-union-a", "asset-union-b"]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("worker builds and executes a custom dockerfile node when docker is available", async (t) => {
|
||||||
|
if (!hasDockerRuntime()) {
|
||||||
|
t.diagnostic("docker runtime unavailable; skipping custom dockerfile node test");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
ensureDockerImage("python:3.11-alpine");
|
||||||
|
|
||||||
|
const fixture = await createRuntimeFixture("emboflow-worker-custom-dockerfile-node");
|
||||||
|
t.after(async () => {
|
||||||
|
await fixture.close();
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("workflow_runs").insertOne({
|
||||||
|
_id: "run-custom-dockerfile-node",
|
||||||
|
workflowDefinitionId: "workflow-custom-dockerfile-node",
|
||||||
|
workflowVersionId: "workflow-custom-dockerfile-node-v1",
|
||||||
|
status: "queued",
|
||||||
|
triggeredBy: "local-user",
|
||||||
|
assetIds: ["asset-custom-1", "asset-custom-2"],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.db.collection("run_tasks").insertOne({
|
||||||
|
_id: "task-custom-dockerfile-node",
|
||||||
|
workflowRunId: "run-custom-dockerfile-node",
|
||||||
|
workflowVersionId: "workflow-custom-dockerfile-node-v1",
|
||||||
|
nodeId: "custom-node-merge-assets-1",
|
||||||
|
nodeDefinitionId: "custom-merge-assets",
|
||||||
|
nodeType: "utility",
|
||||||
|
executorType: "docker",
|
||||||
|
executorConfig: {
|
||||||
|
imageTag: "emboflow-test/custom-merge-assets:latest",
|
||||||
|
dockerfileContent: [
|
||||||
|
"FROM python:3.11-alpine",
|
||||||
|
"CMD [\"python3\", \"-c\", \"import json,os,pathlib; payload=json.loads(pathlib.Path(os.environ['EMBOFLOW_INPUT_PATH']).read_text()); asset_ids=payload['context'].get('assetIds', []); pathlib.Path(os.environ['EMBOFLOW_OUTPUT_PATH']).write_text(json.dumps({'result': {'assetIds': asset_ids, 'assetCount': len(asset_ids), 'kind': 'custom-dockerfile'}})); print(f'custom dockerfile handled {len(asset_ids)} assets')\"]",
|
||||||
|
].join("\n"),
|
||||||
|
contract: {
|
||||||
|
inputMode: "single_asset_set",
|
||||||
|
outputMode: "asset_set",
|
||||||
|
artifactType: "json",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
status: "queued",
|
||||||
|
attempt: 1,
|
||||||
|
assetIds: ["asset-custom-1", "asset-custom-2"],
|
||||||
|
upstreamNodeIds: [],
|
||||||
|
outputArtifactIds: [],
|
||||||
|
createdAt: new Date().toISOString(),
|
||||||
|
updatedAt: new Date().toISOString(),
|
||||||
|
});
|
||||||
|
|
||||||
|
await fixture.runtime.runNextTask();
|
||||||
|
const task = await fixture.store.getRunTask("task-custom-dockerfile-node");
|
||||||
|
|
||||||
|
assert.equal(task?.status, "success");
|
||||||
|
assert.equal(task?.summary?.executorType, "docker");
|
||||||
|
assert.match(task?.stdoutLines?.[0] ?? "", /custom dockerfile handled 2 assets/i);
|
||||||
|
assert.deepEqual(task?.lastResultPreview, {
|
||||||
|
assetIds: ["asset-custom-1", "asset-custom-2"],
|
||||||
|
assetCount: 2,
|
||||||
|
kind: "custom-dockerfile",
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|||||||
@ -6,6 +6,13 @@ EmboFlow is a browser-based embodied data engineering platform for ingesting raw
|
|||||||
|
|
||||||
The platform is designed around plugin-based extensibility, but the first version should deliver a stable built-in core before opening broader extension surfaces.
|
The platform is designed around plugin-based extensibility, but the first version should deliver a stable built-in core before opening broader extension surfaces.
|
||||||
|
|
||||||
|
The current V1 implementation exposes that core through four first-class product objects:
|
||||||
|
|
||||||
|
- `Project`
|
||||||
|
- `Asset`
|
||||||
|
- `Dataset`
|
||||||
|
- `WorkflowTemplate`
|
||||||
|
|
||||||
## Primary Users
|
## Primary Users
|
||||||
|
|
||||||
- Individual engineers building embodied datasets
|
- Individual engineers building embodied datasets
|
||||||
@ -19,12 +26,14 @@ Build a usable end-to-end platform that allows users to:
|
|||||||
|
|
||||||
1. Log into a personal or team workspace
|
1. Log into a personal or team workspace
|
||||||
2. Create a project
|
2. Create a project
|
||||||
3. Upload or import raw embodied data assets
|
3. Configure project storage connections for local paths or object storage
|
||||||
4. Auto-detect asset structure and generate preview summaries
|
4. Upload or import raw embodied data assets
|
||||||
5. Compose processing pipelines on a canvas
|
5. Derive reusable datasets from project assets
|
||||||
6. Configure node parameters and inject code into processing nodes
|
6. Auto-detect asset structure and generate preview summaries
|
||||||
7. Execute workflows asynchronously and inspect logs and outputs
|
7. Start a workflow from a reusable template or compose one from a blank canvas
|
||||||
8. Export normalized delivery packages, training datasets, or training config files
|
8. Configure node parameters and inject code into processing nodes
|
||||||
|
9. Execute workflows asynchronously and inspect logs and outputs
|
||||||
|
10. Export normalized delivery packages, training datasets, or training config files
|
||||||
|
|
||||||
## Supported Input Formats in V1
|
## Supported Input Formats in V1
|
||||||
|
|
||||||
@ -47,6 +56,7 @@ Build a usable end-to-end platform that allows users to:
|
|||||||
|
|
||||||
## Major Workspaces
|
## Major Workspaces
|
||||||
|
|
||||||
|
- Project Workspace: create and switch project contexts
|
||||||
- Asset Workspace: upload, import, scan, probe, browse
|
- Asset Workspace: upload, import, scan, probe, browse
|
||||||
- Canvas Workspace: build and run workflows
|
- Canvas Workspace: build and run workflows
|
||||||
- Explore Workspace: inspect raw assets and processed outputs
|
- Explore Workspace: inspect raw assets and processed outputs
|
||||||
|
|||||||
@ -63,6 +63,20 @@ The current V1 editor implementation keeps a mutable local draft that is initial
|
|||||||
|
|
||||||
The current local runtime also persists per-node runtime config under `runtimeGraph.nodeConfigs`. That config includes executor overrides, executor-specific config payloads, optional artifact metadata, and Python code-hook source for supported node categories. When a run is created, the API freezes those node configs into `workflow_runs.runtimeSnapshot` and copies the effective executor choice plus code-hook snapshot onto each `run_task`.
|
The current local runtime also persists per-node runtime config under `runtimeGraph.nodeConfigs`. That config includes executor overrides, executor-specific config payloads, optional artifact metadata, and Python code-hook source for supported node categories. When a run is created, the API freezes those node configs into `workflow_runs.runtimeSnapshot` and copies the effective executor choice plus code-hook snapshot onto each `run_task`.
|
||||||
|
|
||||||
|
The current built-in delivery node library is now Docker-first by default. Unless a workflow author overrides a node runtime config, these built-ins resolve to `executorType=docker` with a local Python container image and `networkMode=none`:
|
||||||
|
|
||||||
|
- `source-asset`
|
||||||
|
- `extract-archive`
|
||||||
|
- `rename-folder`
|
||||||
|
- `validate-structure`
|
||||||
|
- `validate-metadata`
|
||||||
|
- `union-assets`
|
||||||
|
- `intersect-assets`
|
||||||
|
- `difference-assets`
|
||||||
|
- `export-delivery-package`
|
||||||
|
|
||||||
|
This keeps most default processing isolated from the API and worker host processes while still letting individual workflows opt back into `python` or `http`.
|
||||||
|
|
||||||
## Node Categories
|
## Node Categories
|
||||||
|
|
||||||
V1 node categories:
|
V1 node categories:
|
||||||
@ -88,6 +102,10 @@ V1 node categories:
|
|||||||
- training config export
|
- training config export
|
||||||
- Python processing node
|
- Python processing node
|
||||||
|
|
||||||
|
The current V1 runtime also supports project-level custom Docker nodes. A custom node is registered separately from the workflow graph, then exposed through the same node-definition surface as built-in nodes.
|
||||||
|
|
||||||
|
When the user drops one of these node definitions into the editor, the draft should immediately inherit the node's default runtime snapshot. In practice this means the seeded `nodeConfig` already carries the declared executor type, executor config, and contract before the user opens the right-side panel.
|
||||||
|
|
||||||
## Node Definition Contract
|
## Node Definition Contract
|
||||||
|
|
||||||
Each node definition must expose:
|
Each node definition must expose:
|
||||||
@ -126,6 +144,40 @@ def process(input_data, context):
|
|||||||
|
|
||||||
This keeps serialization, logging, and runtime control predictable.
|
This keeps serialization, logging, and runtime control predictable.
|
||||||
|
|
||||||
|
### Custom Docker Node Contract
|
||||||
|
|
||||||
|
Custom containerized nodes must implement the EmboFlow runtime contract instead of inventing their own I/O shape.
|
||||||
|
|
||||||
|
Container input:
|
||||||
|
|
||||||
|
- `EMBOFLOW_INPUT_PATH`
|
||||||
|
points to a JSON file containing the frozen `task` snapshot and the execution `context`
|
||||||
|
- `EMBOFLOW_OUTPUT_PATH`
|
||||||
|
points to the JSON file the container must write before exit
|
||||||
|
|
||||||
|
Expected `context` shape:
|
||||||
|
|
||||||
|
- `assetIds`
|
||||||
|
- `assets`
|
||||||
|
- `upstreamResults`
|
||||||
|
- run and node identifiers
|
||||||
|
|
||||||
|
Expected output shape:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"result": {
|
||||||
|
"...": "..."
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If the custom node declares an `asset_set` style output contract, `result.assetIds` must be a string array. This is what allows downstream nodes to inherit the narrowed asset set.
|
||||||
|
|
||||||
|
If the custom node declares `contract.inputMode = "multi_asset_set"`, the canvas should treat that node as multi-input at authoring time instead of forcing the user through single-input validation rules. The graph validator should derive this capability from the seeded runtime contract, not from a hardcoded node id list alone.
|
||||||
|
|
||||||
|
The current V1 validation boundary now rejects structurally invalid custom nodes before they enter the project registry. This includes missing names, unsupported source kinds, Dockerfiles without a `FROM` instruction, and `Source` category nodes that incorrectly declare `multi_asset_set` input.
|
||||||
|
|
||||||
The current V1 worker executes trusted-local Python hooks when a `run_task` carries a `codeHookSpec`. The hook is executed through a constrained Python harness with the task snapshot and execution context passed in as JSON. Hook stdout is captured into `stdoutLines`, hook failures populate `stderrLines`, and the returned object becomes the task artifact payload.
|
The current V1 worker executes trusted-local Python hooks when a `run_task` carries a `codeHookSpec`. The hook is executed through a constrained Python harness with the task snapshot and execution context passed in as JSON. Hook stdout is captured into `stdoutLines`, hook failures populate `stderrLines`, and the returned object becomes the task artifact payload.
|
||||||
|
|
||||||
The current V1 Docker executor now has two modes:
|
The current V1 Docker executor now has two modes:
|
||||||
@ -142,8 +194,18 @@ In real container mode the worker:
|
|||||||
- captures container stdout and stderr from the Docker CLI process
|
- captures container stdout and stderr from the Docker CLI process
|
||||||
- parses `output.json` back into the task artifact payload when present
|
- parses `output.json` back into the task artifact payload when present
|
||||||
|
|
||||||
|
Optional hook metadata must remain optional in this path. The current V1 Docker runner now treats missing or explicit `null` `codeHookSpec` values as “no hook configured” instead of attempting to execute them. This keeps built-in Docker nodes and custom nodes on the same task schema without adding fake hook payloads.
|
||||||
|
|
||||||
The default Docker runtime policy is `--network none`. This keeps V1 safer for local processing nodes unless a later phase deliberately opens network access for containerized tasks.
|
The default Docker runtime policy is `--network none`. This keeps V1 safer for local processing nodes unless a later phase deliberately opens network access for containerized tasks.
|
||||||
|
|
||||||
|
The V1 worker now also carries direct upstream task previews into the execution context. This is what makes multi-input set nodes executable instead of purely visual:
|
||||||
|
|
||||||
|
- `union-assets` merges all upstream asset ids
|
||||||
|
- `intersect-assets` keeps only the shared asset ids
|
||||||
|
- `difference-assets` subtracts later upstream sets from the first upstream set
|
||||||
|
|
||||||
|
When one upstream node produces a narrowed asset set, the worker treats that effective asset set as the execution input for the downstream task and writes it back to the successful `run_task`.
|
||||||
|
|
||||||
## Data Flow Contract
|
## Data Flow Contract
|
||||||
|
|
||||||
Tasks should exchange managed references, not loose file paths.
|
Tasks should exchange managed references, not loose file paths.
|
||||||
@ -173,12 +235,14 @@ Workflow execution must validate in this order:
|
|||||||
|
|
||||||
Validation failure must block run creation.
|
Validation failure must block run creation.
|
||||||
|
|
||||||
|
The current V1 API now exposes this as a real preflight step, not only as an editor convention. `POST /api/runs/preflight` evaluates the saved workflow version against the selected workflow input bindings and frozen runtime snapshot. `POST /api/runs` reuses the same checks and rejects run creation when any blocking issue remains.
|
||||||
|
|
||||||
## Run Lifecycle
|
## Run Lifecycle
|
||||||
|
|
||||||
When a user executes a workflow:
|
When a user executes a workflow:
|
||||||
|
|
||||||
1. resolve workflow version
|
1. resolve workflow version
|
||||||
2. validate and snapshot all runtime-relevant inputs, including bound asset references
|
2. validate and snapshot all runtime-relevant inputs, including bound asset and dataset references
|
||||||
3. resolve plugin versions
|
3. resolve plugin versions
|
||||||
4. freeze node config and code hooks
|
4. freeze node config and code hooks
|
||||||
5. compile graph into a DAG
|
5. compile graph into a DAG
|
||||||
@ -188,6 +252,19 @@ When a user executes a workflow:
|
|||||||
9. collect outputs, logs, and task state
|
9. collect outputs, logs, and task state
|
||||||
10. finalize run status and summary
|
10. finalize run status and summary
|
||||||
|
|
||||||
|
The current preflight checks include:
|
||||||
|
|
||||||
|
- workflow definition and version linkage
|
||||||
|
- workflow input binding presence
|
||||||
|
- bound asset existence and project match
|
||||||
|
- bound dataset existence and project match
|
||||||
|
- resolution of dataset bindings into runnable asset ids
|
||||||
|
- resolved node definition existence
|
||||||
|
- source and export edge direction rules
|
||||||
|
- multi-input eligibility
|
||||||
|
- executor-specific required config such as Docker image or HTTP URL
|
||||||
|
- non-empty code hook source when a hook is present
|
||||||
|
|
||||||
## Run State Model
|
## Run State Model
|
||||||
|
|
||||||
### WorkflowRun Status
|
### WorkflowRun Status
|
||||||
@ -293,7 +370,8 @@ The persisted local runtime now covers:
|
|||||||
- asset registration and probe reporting
|
- asset registration and probe reporting
|
||||||
- workflow definition and immutable version snapshots
|
- workflow definition and immutable version snapshots
|
||||||
- workflow runs and task creation with worker-consumable dependency snapshots
|
- workflow runs and task creation with worker-consumable dependency snapshots
|
||||||
- workflow run asset bindings persisted on both runs and tasks
|
- workflow run input bindings persisted on both runs and tasks
|
||||||
|
- resolved asset ids and explicit dataset ids persisted separately on both runs and tasks
|
||||||
- project-scoped run history queries from Mongo-backed `workflow_runs`
|
- project-scoped run history queries from Mongo-backed `workflow_runs`
|
||||||
- worker polling of queued tasks from Mongo-backed `run_tasks`
|
- worker polling of queued tasks from Mongo-backed `run_tasks`
|
||||||
- run-task status transitions from `queued/pending` to `running/success/failed`
|
- run-task status transitions from `queued/pending` to `running/success/failed`
|
||||||
|
|||||||
@ -12,7 +12,9 @@ Top-level product areas:
|
|||||||
|
|
||||||
- Workspace switcher
|
- Workspace switcher
|
||||||
- Project selector
|
- Project selector
|
||||||
|
- Projects
|
||||||
- Asset Workspace
|
- Asset Workspace
|
||||||
|
- Node Registry Workspace
|
||||||
- Canvas Workspace
|
- Canvas Workspace
|
||||||
- Explore Workspace
|
- Explore Workspace
|
||||||
- Label Workspace
|
- Label Workspace
|
||||||
@ -35,7 +37,9 @@ Recommended global header content:
|
|||||||
|
|
||||||
Recommended primary navigation:
|
Recommended primary navigation:
|
||||||
|
|
||||||
|
- Projects
|
||||||
- Assets
|
- Assets
|
||||||
|
- Nodes
|
||||||
- Workflows
|
- Workflows
|
||||||
- Runs
|
- Runs
|
||||||
- Explore
|
- Explore
|
||||||
@ -59,6 +63,12 @@ Purpose:
|
|||||||
|
|
||||||
V1 should emphasize project-level organization because all major resources are project-scoped.
|
V1 should emphasize project-level organization because all major resources are project-scoped.
|
||||||
|
|
||||||
|
The current implementation now matches this with:
|
||||||
|
|
||||||
|
- a dedicated `Projects` sidebar entry
|
||||||
|
- a header-level active project selector
|
||||||
|
- project cards that let the user open a project directly into workflow authoring
|
||||||
|
|
||||||
## Screen 2: Asset Workspace
|
## Screen 2: Asset Workspace
|
||||||
|
|
||||||
Purpose:
|
Purpose:
|
||||||
@ -85,6 +95,12 @@ Key actions:
|
|||||||
- open preview
|
- open preview
|
||||||
- create workflow from asset
|
- create workflow from asset
|
||||||
|
|
||||||
|
The current V1 runtime extends this screen beyond raw assets and treats project data management as one combined workspace:
|
||||||
|
|
||||||
|
- raw asset registration
|
||||||
|
- storage connection creation for `local`, `minio`, `s3`, `bos`, and `oss`
|
||||||
|
- dataset creation from selected source assets into a selected storage connection
|
||||||
|
|
||||||
## Screen 3: Asset Detail / Explore Entry
|
## Screen 3: Asset Detail / Explore Entry
|
||||||
|
|
||||||
Purpose:
|
Purpose:
|
||||||
@ -143,24 +159,73 @@ Supports:
|
|||||||
The current V1 implementation is simpler than the target canvas UX, but it already follows the same persistence model:
|
The current V1 implementation is simpler than the target canvas UX, but it already follows the same persistence model:
|
||||||
|
|
||||||
- load the latest saved workflow version when the editor opens
|
- load the latest saved workflow version when the editor opens
|
||||||
- load project assets so the run entrypoint can bind a concrete input asset
|
- load project assets and datasets so the run entrypoint can bind a concrete input source
|
||||||
- keep an unsaved draft in local editor state
|
- keep an unsaved draft in local editor state
|
||||||
- allow node add and remove operations on the draft
|
- allow node add and remove operations on the draft
|
||||||
- save the current draft as a new workflow version
|
- save the current draft as a new workflow version
|
||||||
- auto-save a dirty draft before triggering a run
|
- auto-save a dirty draft before triggering a run
|
||||||
|
- run a workflow-level preflight check against the latest saved version and selected bound asset or dataset before execution
|
||||||
|
|
||||||
The current runtime implementation now also renders the center surface as a real node canvas instead of a static placeholder list:
|
The current runtime implementation now also renders the center surface as a real node canvas instead of a static placeholder list:
|
||||||
|
|
||||||
- free node dragging on the canvas
|
- free node dragging on the canvas
|
||||||
|
- left-panel node drag-and-drop into the canvas, in addition to click-to-append
|
||||||
- drag-to-connect edges between node handles
|
- drag-to-connect edges between node handles
|
||||||
- zoom and pan
|
- zoom and pan
|
||||||
- dotted background grid
|
- dotted background grid
|
||||||
- mini-map
|
- mini-map
|
||||||
- canvas controls
|
- canvas controls
|
||||||
- persisted node positions and viewport in `visualGraph`
|
- persisted node positions and viewport in `visualGraph`
|
||||||
|
- localized inline validation feedback when a connection is rejected
|
||||||
|
|
||||||
|
The current V1 runtime header in the workflow editor now also treats run input selection as a first-class control:
|
||||||
|
|
||||||
|
- choose input source type as `asset` or `dataset`
|
||||||
|
- choose a concrete project asset or dataset inside that type
|
||||||
|
- pass the selected source through preflight and run creation
|
||||||
|
- show the resolved source again in run detail as `input sources`, `input assets`, and `input datasets`
|
||||||
|
|
||||||
|
The current V1 authoring rules intentionally keep the graph model constrained so the workflow stays legible and executable:
|
||||||
|
|
||||||
|
- source nodes do not accept inbound edges
|
||||||
|
- export nodes do not emit outbound edges
|
||||||
|
- duplicate edges are blocked
|
||||||
|
- self-edges are blocked
|
||||||
|
- ordinary nodes may only keep one inbound edge
|
||||||
|
- set-operation utility nodes may accept multiple inbound edges
|
||||||
|
- cycles are blocked
|
||||||
|
|
||||||
|
Custom nodes follow the same rule system, but the decision is contract-driven. When a custom Docker node is added to the canvas, the editor seeds its runtime defaults into the draft immediately; if that contract declares `inputMode=multi_asset_set`, the node is treated like a multi-input utility node from the first connection attempt.
|
||||||
|
|
||||||
|
The current built-in node library also exposes Docker-first runtime defaults in the editor. Most built-ins now render with `docker` preselected, while still allowing the user to override the executor, image, and optional Python code hook from the right-side configuration panel.
|
||||||
|
|
||||||
The runtime header also now exposes a visible `中文 / English` language toggle and the main shell plus workflow authoring surface are translated through a lightweight i18n layer.
|
The runtime header also now exposes a visible `中文 / English` language toggle and the main shell plus workflow authoring surface are translated through a lightweight i18n layer.
|
||||||
|
|
||||||
|
The workflow entry surface that leads into this editor is also now template-aware:
|
||||||
|
|
||||||
|
- the Workflows page lists reusable workflow templates for the active project
|
||||||
|
- the user can create a project workflow from a template
|
||||||
|
- the user can still create a blank workflow directly
|
||||||
|
|
||||||
|
## Screen 4A: Node Registry Workspace
|
||||||
|
|
||||||
|
Purpose:
|
||||||
|
|
||||||
|
- create project-level custom nodes
|
||||||
|
- choose Docker image or Dockerfile source
|
||||||
|
- declare input and output contract
|
||||||
|
- publish the node into the workflow editor node library
|
||||||
|
|
||||||
|
Core regions:
|
||||||
|
|
||||||
|
- top: contract summary and authoring guidance
|
||||||
|
- left/top: creation form
|
||||||
|
- bottom/right: existing custom node list for the active project
|
||||||
|
|
||||||
|
The creation form should validate node definitions before submission, not only after an API round-trip. In the current V1 direction the UI and API share the same rules for required names, valid image or Dockerfile sources, mandatory `FROM` instructions in Dockerfiles, and invalid category-contract combinations such as `Source + multi_asset_set`.
|
||||||
|
|
||||||
|
The current V1 direction treats custom nodes as project-scoped runtime extensions, not global plugins. That keeps tenancy and lifecycle simpler while still giving teams a controlled way to bring containerized processing into the canvas.
|
||||||
|
|
||||||
### Right Configuration Panel
|
### Right Configuration Panel
|
||||||
|
|
||||||
The right panel is schema-driven.
|
The right panel is schema-driven.
|
||||||
@ -178,6 +243,10 @@ It should render:
|
|||||||
|
|
||||||
This panel is critical. It should feel like a structured system console, not a generic form dump.
|
This panel is critical. It should feel like a structured system console, not a generic form dump.
|
||||||
|
|
||||||
|
The current right panel also includes a workflow-level `Save As Template` section so an edited graph can be published back into the project template library.
|
||||||
|
For project-scoped custom nodes, the right panel should also surface the declared contract summary directly from the node definition, including input mode, output mode, artifact type, and whether the backing runtime came from a Docker image or Dockerfile definition.
|
||||||
|
The current V1 direction now also renders the standard EmboFlow input and output envelope preview for selected custom nodes, so container authors can see the exact JSON shape expected by the runtime without leaving the editor.
|
||||||
|
|
||||||
## Screen 5: Workflow Run Detail
|
## Screen 5: Workflow Run Detail
|
||||||
|
|
||||||
Purpose:
|
Purpose:
|
||||||
@ -191,7 +260,7 @@ Purpose:
|
|||||||
Recommended layout:
|
Recommended layout:
|
||||||
|
|
||||||
- top: run summary and status
|
- top: run summary and status
|
||||||
- top: bound asset summary and links back to input assets
|
- top: bound input source summary and links back to input assets or datasets
|
||||||
- center: workflow graph with execution overlays
|
- center: workflow graph with execution overlays
|
||||||
- bottom or side drawer: logs and artifacts for selected node
|
- bottom or side drawer: logs and artifacts for selected node
|
||||||
|
|
||||||
|
|||||||
@ -55,6 +55,7 @@ while still targeting the collection model below as the persistent shape.
|
|||||||
- `annotations`
|
- `annotations`
|
||||||
- `plugins`
|
- `plugins`
|
||||||
- `storage_connections`
|
- `storage_connections`
|
||||||
|
- `custom_nodes`
|
||||||
- `audit_logs`
|
- `audit_logs`
|
||||||
|
|
||||||
## Collection Design
|
## Collection Design
|
||||||
@ -209,6 +210,36 @@ Core fields:
|
|||||||
- `createdAt`
|
- `createdAt`
|
||||||
- `updatedAt`
|
- `updatedAt`
|
||||||
|
|
||||||
|
### custom_nodes
|
||||||
|
|
||||||
|
Purpose:
|
||||||
|
|
||||||
|
- store project-scoped custom container node definitions
|
||||||
|
|
||||||
|
Core fields:
|
||||||
|
|
||||||
|
- `_id`
|
||||||
|
- `definitionId`
|
||||||
|
- `workspaceId`
|
||||||
|
- `projectId`
|
||||||
|
- `name`
|
||||||
|
- `slug`
|
||||||
|
- `description`
|
||||||
|
- `category`
|
||||||
|
- `status`
|
||||||
|
- `contract`
|
||||||
|
- `source`
|
||||||
|
- `createdBy`
|
||||||
|
- `createdAt`
|
||||||
|
- `updatedAt`
|
||||||
|
|
||||||
|
The current V1 implementation stores the custom node source as either:
|
||||||
|
|
||||||
|
- an existing Docker image reference
|
||||||
|
- a self-contained Dockerfile body plus an image tag
|
||||||
|
|
||||||
|
The node contract is persisted with the node definition so the API can expose correct node metadata to the editor and the worker can validate runtime outputs.
|
||||||
|
|
||||||
### dataset_versions
|
### dataset_versions
|
||||||
|
|
||||||
Purpose:
|
Purpose:
|
||||||
@ -347,7 +378,7 @@ The current executable worker path expects `run_tasks` to be self-sufficient eno
|
|||||||
|
|
||||||
- executor choice
|
- executor choice
|
||||||
- node definition id and frozen per-node runtime config
|
- node definition id and frozen per-node runtime config
|
||||||
- bound asset ids
|
- bound asset ids at run creation time, then the effective asset ids that were actually executed after any upstream set-operation narrowing
|
||||||
- upstream node dependencies
|
- upstream node dependencies
|
||||||
- produced artifact ids
|
- produced artifact ids
|
||||||
- per-task status and error message
|
- per-task status and error message
|
||||||
@ -400,6 +431,19 @@ Core fields:
|
|||||||
- `createdBy`
|
- `createdBy`
|
||||||
- `createdAt`
|
- `createdAt`
|
||||||
|
|
||||||
|
### workflow_runs and run_tasks input binding note
|
||||||
|
|
||||||
|
The current V1 runtime now stores workflow input selection in three layers:
|
||||||
|
|
||||||
|
- `inputBindings`
|
||||||
|
The explicit operator-facing selection such as `[{ kind: "dataset", id: "dataset-..." }]`
|
||||||
|
- `assetIds`
|
||||||
|
The resolved runnable asset ids after dataset expansion and deduplication
|
||||||
|
- `datasetIds`
|
||||||
|
The explicit dataset ids that participated in the run or task
|
||||||
|
|
||||||
|
This keeps execution backward-compatible for asset-oriented nodes while preserving the higher-level project data model in run history and task detail.
|
||||||
|
|
||||||
### annotation_tasks
|
### annotation_tasks
|
||||||
|
|
||||||
Purpose:
|
Purpose:
|
||||||
|
|||||||
@ -27,6 +27,10 @@
|
|||||||
- `2026-03-27`: The current Docker-runtime pass upgrades `executorType=docker` from a pure stub to a real local container execution path whenever `executorConfig.image` is provided, while retaining a compatibility fallback for older demo tasks without an image.
|
- `2026-03-27`: The current Docker-runtime pass upgrades `executorType=docker` from a pure stub to a real local container execution path whenever `executorConfig.image` is provided, while retaining a compatibility fallback for older demo tasks without an image.
|
||||||
- `2026-03-27`: The current built-in-node pass enriches the worker execution context with bound asset metadata and gives the default Python implementations for `source-asset` and `validate-structure` real delivery-oriented behavior instead of placeholder output.
|
- `2026-03-27`: The current built-in-node pass enriches the worker execution context with bound asset metadata and gives the default Python implementations for `source-asset` and `validate-structure` real delivery-oriented behavior instead of placeholder output.
|
||||||
- `2026-03-27`: The current web-authoring pass adds a visible zh/en language switcher, a lightweight i18n layer for the runtime shell, and a real React Flow canvas with persisted node positions and viewport instead of the earlier static node list.
|
- `2026-03-27`: The current web-authoring pass adds a visible zh/en language switcher, a lightweight i18n layer for the runtime shell, and a real React Flow canvas with persisted node positions and viewport instead of the earlier static node list.
|
||||||
|
- `2026-03-27`: The follow-up canvas pass adds left-panel drag-and-drop node placement, localized canvas feedback, and V1 connection guards for self-edges, duplicates, cycles, invalid source/export directions, and multiple inbound edges.
|
||||||
|
- `2026-03-30`: The current product-integration pass promotes projects, datasets, storage connections, and workflow templates into first-class runtime flows. The shell now has a dedicated Projects page, project switching, workflow template gallery, workflow creation from templates, and workflow-level save-as-template support.
|
||||||
|
- `2026-03-30`: The current docker-defaults pass makes most built-in delivery nodes Docker-first by default, adds `union-assets` / `intersect-assets` / `difference-assets` utility nodes, permits multi-input edges only for those set nodes, and propagates narrowed upstream asset sets through downstream task execution.
|
||||||
|
- `2026-03-30`: The current custom-node pass adds a project-scoped `Nodes` tab, custom Docker node registration from image or self-contained Dockerfile, a persisted custom node collection, workflow-editor visibility through `/api/node-definitions`, and worker-side Dockerfile build plus output-contract validation.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
123
docs/plans/2026-03-30-project-dataset-template-design.md
Normal file
123
docs/plans/2026-03-30-project-dataset-template-design.md
Normal file
@ -0,0 +1,123 @@
|
|||||||
|
# EmboFlow Project Dataset Template Design
|
||||||
|
|
||||||
|
## Goal
|
||||||
|
|
||||||
|
Define the next V1 product slice that turns the current runtime skeleton into a project-centric data workflow console with first-class datasets, storage connections, and workflow templates.
|
||||||
|
|
||||||
|
## Approved Boundary
|
||||||
|
|
||||||
|
- `Asset` remains the raw input object
|
||||||
|
- `Dataset` becomes a project-scoped first-class object
|
||||||
|
- `StorageConnection` becomes the place where datasets choose their persistence target
|
||||||
|
- `WorkflowTemplate` becomes the reusable authoring entrypoint for workflows
|
||||||
|
|
||||||
|
## Current Implementation Baseline
|
||||||
|
|
||||||
|
The current codebase already has:
|
||||||
|
|
||||||
|
- Mongo-backed `storage_connections`, `datasets`, `dataset_versions`, and `workflow_templates`
|
||||||
|
- HTTP endpoints for creating and listing those objects
|
||||||
|
- an asset page that already exposes storage connection and dataset creation forms
|
||||||
|
- a workflow editor with a large React Flow canvas, node drag and drop, edge creation, and Python code-hook editing
|
||||||
|
- workflow creation from blank definitions
|
||||||
|
|
||||||
|
The missing layer is product integration:
|
||||||
|
|
||||||
|
- project switching and project creation in the main shell
|
||||||
|
- a visible project workspace instead of a fixed bootstrap project
|
||||||
|
- workflow template selection on the workflows landing page
|
||||||
|
- template-based workflow creation as a first-class action
|
||||||
|
- saving an edited workflow as a reusable template
|
||||||
|
|
||||||
|
## Product Model
|
||||||
|
|
||||||
|
### Workspace
|
||||||
|
|
||||||
|
The workspace owns:
|
||||||
|
|
||||||
|
- projects
|
||||||
|
- storage connections
|
||||||
|
- workspace-scoped workflow templates
|
||||||
|
|
||||||
|
### Project
|
||||||
|
|
||||||
|
The project owns:
|
||||||
|
|
||||||
|
- assets
|
||||||
|
- datasets
|
||||||
|
- workflow definitions
|
||||||
|
- workflow runs
|
||||||
|
- project-scoped workflow templates
|
||||||
|
|
||||||
|
### Asset vs Dataset
|
||||||
|
|
||||||
|
- `Asset` is the raw import or registered source
|
||||||
|
- `Dataset` is the reusable project data product
|
||||||
|
- A dataset references one or more source assets and one storage connection
|
||||||
|
- Dataset versions remain immutable snapshots under the dataset
|
||||||
|
|
||||||
|
## UX Changes
|
||||||
|
|
||||||
|
### Header
|
||||||
|
|
||||||
|
The header should expose:
|
||||||
|
|
||||||
|
- workspace name
|
||||||
|
- active project selector
|
||||||
|
- quick create project action
|
||||||
|
- language switcher
|
||||||
|
|
||||||
|
### Projects Page
|
||||||
|
|
||||||
|
Add a dedicated projects page to:
|
||||||
|
|
||||||
|
- list existing projects
|
||||||
|
- create a new project
|
||||||
|
- switch the active project
|
||||||
|
- show lightweight counts for assets, datasets, workflows, and runs
|
||||||
|
|
||||||
|
### Assets Page
|
||||||
|
|
||||||
|
Keep the existing asset page as the project data hub:
|
||||||
|
|
||||||
|
- raw asset registration
|
||||||
|
- storage connection management
|
||||||
|
- dataset creation
|
||||||
|
- project asset list
|
||||||
|
|
||||||
|
### Workflows Page
|
||||||
|
|
||||||
|
Split the current workflows landing page into two clear entry paths:
|
||||||
|
|
||||||
|
- start from template
|
||||||
|
- start from blank workflow
|
||||||
|
|
||||||
|
Each template card should support:
|
||||||
|
|
||||||
|
- create workflow from template
|
||||||
|
- open the template-backed workflow after creation
|
||||||
|
|
||||||
|
### Workflow Editor
|
||||||
|
|
||||||
|
Keep the large canvas and runtime configuration model, and add:
|
||||||
|
|
||||||
|
- save current workflow as template
|
||||||
|
- explicit template name and description inputs for that action
|
||||||
|
- no reduction in current node-level editing power
|
||||||
|
|
||||||
|
## Implementation Rules
|
||||||
|
|
||||||
|
- do not replace the current `Asset` run binding model in this slice
|
||||||
|
- do not move storage connection management to a different backend model
|
||||||
|
- do not introduce a new visual framework for the canvas
|
||||||
|
- reuse current Mongo collections and runtime store methods where possible
|
||||||
|
|
||||||
|
## Success Criteria
|
||||||
|
|
||||||
|
The slice is done when:
|
||||||
|
|
||||||
|
1. users can create and switch projects without restarting bootstrap context
|
||||||
|
2. datasets are visibly project-scoped and backed by a chosen storage connection
|
||||||
|
3. workflows can be created either from a template or from a blank definition
|
||||||
|
4. edited workflows can be saved back as reusable templates
|
||||||
|
5. the canvas remains the primary authoring surface with runtime config and Python hook editing intact
|
||||||
239
docs/plans/2026-03-30-project-dataset-template-implementation.md
Normal file
239
docs/plans/2026-03-30-project-dataset-template-implementation.md
Normal file
@ -0,0 +1,239 @@
|
|||||||
|
# EmboFlow Project Dataset Template Implementation Plan
|
||||||
|
|
||||||
|
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||||
|
|
||||||
|
**Goal:** Turn the current runtime shell into a project-centric product surface with project switching, project creation, workflow templates, and first-class dataset/storage management.
|
||||||
|
|
||||||
|
**Architecture:** Reuse the existing Mongo runtime store and HTTP API for storage connections, datasets, dataset versions, and workflow templates. Extend the web runtime shell so the active project becomes selectable, project resources reload against the selected project, and workflows can be created from templates or from a blank canvas. Keep `Asset` as raw input and `Dataset` as the project data product.
|
||||||
|
|
||||||
|
**Tech Stack:** React, TypeScript, React Flow, Express runtime API, MongoDB, `tsx --test`, Python unittest, and repo guardrails.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### Task 1: Add Project Runtime API Client Support
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `apps/web/src/runtime/api-client.ts`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Use the existing runtime integration coverage as the external contract, and add a focused web runtime helper test only if a new pure helper is introduced.
|
||||||
|
|
||||||
|
**Step 2: Implement the minimal API additions**
|
||||||
|
|
||||||
|
Add:
|
||||||
|
|
||||||
|
- `listProjects(workspaceId)`
|
||||||
|
- `createProject({ workspaceId, name, description, createdBy })`
|
||||||
|
|
||||||
|
Keep the current request/response style unchanged.
|
||||||
|
|
||||||
|
**Step 3: Verify**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add apps/web/src/runtime/api-client.ts
|
||||||
|
git commit -m ":sparkles: add project runtime api client support"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Task 2: Make The Shell Project-Aware
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `apps/web/src/runtime/app.tsx`
|
||||||
|
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||||
|
- Modify: `apps/web/src/styles.css`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a focused runtime-state or pure-helper test for active project resolution if one is needed.
|
||||||
|
|
||||||
|
**Step 2: Implement**
|
||||||
|
|
||||||
|
Add:
|
||||||
|
|
||||||
|
- active project state in `App`
|
||||||
|
- project list loading after bootstrap
|
||||||
|
- project selector in the header
|
||||||
|
- quick create project action
|
||||||
|
- route-safe project switching behavior
|
||||||
|
- a `Projects` nav item and page entry
|
||||||
|
|
||||||
|
**Step 3: Verify**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx apps/web/src/styles.css
|
||||||
|
git commit -m ":sparkles: add project-aware shell state"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Task 3: Add A Dedicated Projects Page
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `apps/web/src/runtime/app.tsx`
|
||||||
|
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a web runtime test or browser validation script target if a helper is introduced.
|
||||||
|
|
||||||
|
**Step 2: Implement**
|
||||||
|
|
||||||
|
Create a projects workspace view that:
|
||||||
|
|
||||||
|
- lists projects for the current workspace
|
||||||
|
- marks the active project
|
||||||
|
- creates a project
|
||||||
|
- lets the user switch into a project
|
||||||
|
- shows lightweight resource counts derived from existing APIs
|
||||||
|
|
||||||
|
**Step 3: Verify**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||||
|
git commit -m ":sparkles: add projects workspace"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Task 4: Turn Workflows Landing Into A Template Entry Surface
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `apps/web/src/runtime/app.tsx`
|
||||||
|
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a focused browser validation script or pure helper test for template naming if needed.
|
||||||
|
|
||||||
|
**Step 2: Implement**
|
||||||
|
|
||||||
|
Update the workflows landing page to:
|
||||||
|
|
||||||
|
- load workflow templates for the active workspace/project
|
||||||
|
- render template cards
|
||||||
|
- create a workflow from a template
|
||||||
|
- still support blank workflow creation
|
||||||
|
- route into the created workflow editor
|
||||||
|
|
||||||
|
**Step 3: Verify**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||||
|
git commit -m ":sparkles: add workflow template entry flow"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Task 5: Add Save-As-Template In The Workflow Editor
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `apps/web/src/runtime/app.tsx`
|
||||||
|
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||||
|
|
||||||
|
**Step 1: Write the failing test**
|
||||||
|
|
||||||
|
Add a minimal helper test if a template payload builder is introduced.
|
||||||
|
|
||||||
|
**Step 2: Implement**
|
||||||
|
|
||||||
|
Add editor controls to:
|
||||||
|
|
||||||
|
- enter template name and description
|
||||||
|
- save the current draft/version payload as a workflow template
|
||||||
|
- keep the large canvas and node runtime editing behavior intact
|
||||||
|
|
||||||
|
**Step 3: Verify**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||||
|
git commit -m ":sparkles: add save as template workflow action"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Task 6: Update Docs And Run Full Verification
|
||||||
|
|
||||||
|
**Files:**
|
||||||
|
- Modify: `README.md`
|
||||||
|
- Modify: `design/00-overview/emboflow-platform-overview.md`
|
||||||
|
- Modify: `design/04-ui-ux/information-architecture-and-key-screens.md`
|
||||||
|
- Modify: `docs/plans/2026-03-26-emboflow-v1-foundation-and-mvp.md`
|
||||||
|
|
||||||
|
**Step 1: Update docs**
|
||||||
|
|
||||||
|
Document:
|
||||||
|
|
||||||
|
- project selector and projects workspace
|
||||||
|
- dataset/storage management as first-class project features
|
||||||
|
- workflow template entry and save-as-template flow
|
||||||
|
|
||||||
|
**Step 2: Run verification**
|
||||||
|
|
||||||
|
Run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make test
|
||||||
|
make guardrails
|
||||||
|
pnpm --filter web build
|
||||||
|
```
|
||||||
|
|
||||||
|
Expected: PASS
|
||||||
|
|
||||||
|
**Step 3: Browser validation**
|
||||||
|
|
||||||
|
Validate locally that:
|
||||||
|
|
||||||
|
- the active project can be changed
|
||||||
|
- a new project can be created
|
||||||
|
- a workflow can be created from a template
|
||||||
|
- a blank workflow can still be created
|
||||||
|
- an edited workflow can be saved as a template
|
||||||
|
|
||||||
|
**Step 4: Commit**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git add README.md design/00-overview/emboflow-platform-overview.md design/04-ui-ux/information-architecture-and-key-screens.md docs/plans/2026-03-26-emboflow-v1-foundation-and-mvp.md docs/plans/2026-03-30-project-dataset-template-design.md docs/plans/2026-03-30-project-dataset-template-implementation.md
|
||||||
|
git commit -m ":memo: document project dataset and template flow"
|
||||||
|
```
|
||||||
257
packages/contracts/src/custom-node.ts
Normal file
257
packages/contracts/src/custom-node.ts
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
/** Palette categories a custom node definition can be registered under. */
export const CUSTOM_NODE_CATEGORIES = [
  "Source",
  "Transform",
  "Inspect",
  "Annotate",
  "Export",
  "Utility",
] as const;
export type CustomNodeCategory = (typeof CUSTOM_NODE_CATEGORIES)[number];

/** How a node consumes upstream data: a single asset set or several. */
export const CUSTOM_NODE_INPUT_MODES = ["single_asset_set", "multi_asset_set"] as const;
export type CustomNodeInputMode = (typeof CUSTOM_NODE_INPUT_MODES)[number];

/** What a node emits: a report, an asset set, or both. */
export const CUSTOM_NODE_OUTPUT_MODES = [
  "report",
  "asset_set",
  "asset_set_with_report",
] as const;
export type CustomNodeOutputMode = (typeof CUSTOM_NODE_OUTPUT_MODES)[number];

/** Shape of the artifact a node produces. */
export const CUSTOM_NODE_ARTIFACT_TYPES = ["json", "directory", "video"] as const;
export type CustomNodeArtifactType = (typeof CUSTOM_NODE_ARTIFACT_TYPES)[number];

/** Execution contract declared by a custom node definition. */
export type CustomNodeContract = {
  inputMode: CustomNodeInputMode;
  outputMode: CustomNodeOutputMode;
  artifactType: CustomNodeArtifactType;
};

/**
 * Sample input/output envelope shown to node authors, illustrating the JSON
 * exchanged with the docker executor (see buildCustomNodeEnvelopePreview).
 */
export type CustomNodeEnvelopePreview = {
  input: {
    // Task handed to the executor for one node invocation.
    task: {
      nodeId: string;
      nodeDefinitionId: string;
      executorType: "docker";
      assetIds: string[];
    };
    // Resolved context: assets to operate on plus upstream node results.
    context: {
      assetIds: string[];
      assets: Array<{
        _id: string;
        displayName: string;
        sourcePath: string;
      }>;
      upstreamResults: Array<{
        nodeId: string;
        result: Record<string, unknown>;
      }>;
    };
  };
  output: {
    result: Record<string, unknown>;
  };
};

/** Docker source: either a prebuilt image reference or an inline Dockerfile. */
export type CustomNodeSource =
  | {
      kind: "image";
      image: string;
      command?: string[];
    }
  | {
      kind: "dockerfile";
      dockerfileContent: string;
      imageTag?: string;
      command?: string[];
    };

/** Issue codes produced by validateCustomNodeDefinition. */
export type CustomNodeValidationIssue =
  | "name_required"
  | "name_too_long"
  | "invalid_category"
  | "invalid_source_kind"
  | "image_required"
  | "dockerfile_required"
  | "dockerfile_missing_from"
  | "invalid_command"
  | "invalid_input_mode"
  | "invalid_output_mode"
  | "invalid_artifact_type"
  | "source_cannot_be_multi_input";

/** Untrusted draft payload accepted by validateCustomNodeDefinition. */
type CustomNodeValidationInput = {
  name?: unknown;
  category?: unknown;
  source?: unknown;
  contract?: unknown;
};
|
||||||
|
|
||||||
|
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||||
|
return typeof value === "object" && value !== null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function includesValue<T extends readonly string[]>(values: T, candidate: unknown): candidate is T[number] {
|
||||||
|
return typeof candidate === "string" && values.includes(candidate);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isCommandArray(value: unknown) {
|
||||||
|
return Array.isArray(value) && value.every((item) => typeof item === "string" && item.trim().length > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasDockerfileFromInstruction(value: string) {
|
||||||
|
return /^\s*FROM\s+/imu.test(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function formatCustomNodeValidationIssue(issue: CustomNodeValidationIssue) {
|
||||||
|
switch (issue) {
|
||||||
|
case "name_required":
|
||||||
|
return "custom node name is required";
|
||||||
|
case "name_too_long":
|
||||||
|
return "custom node name must be 80 characters or fewer";
|
||||||
|
case "invalid_category":
|
||||||
|
return "custom node category is invalid";
|
||||||
|
case "invalid_source_kind":
|
||||||
|
return "custom node source kind must be image or dockerfile";
|
||||||
|
case "image_required":
|
||||||
|
return "custom node image is required";
|
||||||
|
case "dockerfile_required":
|
||||||
|
return "custom node dockerfileContent is required";
|
||||||
|
case "dockerfile_missing_from":
|
||||||
|
return "custom node dockerfile must include a FROM instruction";
|
||||||
|
case "invalid_command":
|
||||||
|
return "custom node command must be an array of non-empty arguments";
|
||||||
|
case "invalid_input_mode":
|
||||||
|
return "custom node input mode is invalid";
|
||||||
|
case "invalid_output_mode":
|
||||||
|
return "custom node output mode is invalid";
|
||||||
|
case "invalid_artifact_type":
|
||||||
|
return "custom node artifact type is invalid";
|
||||||
|
case "source_cannot_be_multi_input":
|
||||||
|
return "source category custom nodes cannot declare multi_asset_set input";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function validateCustomNodeDefinition(input: CustomNodeValidationInput): CustomNodeValidationIssue[] {
|
||||||
|
const issues: CustomNodeValidationIssue[] = [];
|
||||||
|
const name = typeof input.name === "string" ? input.name.trim() : "";
|
||||||
|
|
||||||
|
if (name.length === 0) {
|
||||||
|
issues.push("name_required");
|
||||||
|
} else if (name.length > 80) {
|
||||||
|
issues.push("name_too_long");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!includesValue(CUSTOM_NODE_CATEGORIES, input.category)) {
|
||||||
|
issues.push("invalid_category");
|
||||||
|
}
|
||||||
|
|
||||||
|
const contract = isRecord(input.contract) ? input.contract : null;
|
||||||
|
const inputMode = contract?.inputMode;
|
||||||
|
const outputMode = contract?.outputMode;
|
||||||
|
const artifactType = contract?.artifactType;
|
||||||
|
|
||||||
|
if (!includesValue(CUSTOM_NODE_INPUT_MODES, inputMode)) {
|
||||||
|
issues.push("invalid_input_mode");
|
||||||
|
}
|
||||||
|
if (!includesValue(CUSTOM_NODE_OUTPUT_MODES, outputMode)) {
|
||||||
|
issues.push("invalid_output_mode");
|
||||||
|
}
|
||||||
|
if (!includesValue(CUSTOM_NODE_ARTIFACT_TYPES, artifactType)) {
|
||||||
|
issues.push("invalid_artifact_type");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (input.category === "Source" && inputMode === "multi_asset_set") {
|
||||||
|
issues.push("source_cannot_be_multi_input");
|
||||||
|
}
|
||||||
|
|
||||||
|
const source = isRecord(input.source) ? input.source : null;
|
||||||
|
if (!source || (source.kind !== "image" && source.kind !== "dockerfile")) {
|
||||||
|
issues.push("invalid_source_kind");
|
||||||
|
return issues;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (source.command !== undefined && !isCommandArray(source.command)) {
|
||||||
|
issues.push("invalid_command");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (source.kind === "image") {
|
||||||
|
if (typeof source.image !== "string" || source.image.trim().length === 0) {
|
||||||
|
issues.push("image_required");
|
||||||
|
}
|
||||||
|
return issues;
|
||||||
|
}
|
||||||
|
|
||||||
|
const dockerfileContent =
|
||||||
|
typeof source.dockerfileContent === "string" ? source.dockerfileContent.trim() : "";
|
||||||
|
if (dockerfileContent.length === 0) {
|
||||||
|
issues.push("dockerfile_required");
|
||||||
|
} else if (!hasDockerfileFromInstruction(dockerfileContent)) {
|
||||||
|
issues.push("dockerfile_missing_from");
|
||||||
|
}
|
||||||
|
|
||||||
|
return issues;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildCustomNodeEnvelopePreview(contract: CustomNodeContract): CustomNodeEnvelopePreview {
|
||||||
|
const upstreamResult =
|
||||||
|
contract.inputMode === "multi_asset_set"
|
||||||
|
? {
|
||||||
|
nodeId: "upstream-union-assets",
|
||||||
|
result: {
|
||||||
|
assetIds: ["asset-123"],
|
||||||
|
summary: {
|
||||||
|
keptAssetCount: 1,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
: {
|
||||||
|
nodeId: "upstream-source-asset",
|
||||||
|
result: {
|
||||||
|
assetIds: ["asset-123"],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const result: Record<string, unknown> = {
|
||||||
|
summary: {
|
||||||
|
outcome: "success",
|
||||||
|
processedAssetCount: 1,
|
||||||
|
},
|
||||||
|
artifactType: contract.artifactType,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (contract.outputMode === "asset_set" || contract.outputMode === "asset_set_with_report") {
|
||||||
|
result.assetIds = ["asset-123"];
|
||||||
|
}
|
||||||
|
if (contract.outputMode === "report" || contract.outputMode === "asset_set_with_report") {
|
||||||
|
result.report = {
|
||||||
|
status: "ok",
|
||||||
|
findings: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
input: {
|
||||||
|
task: {
|
||||||
|
nodeId: "custom-node-1",
|
||||||
|
nodeDefinitionId: "custom-example",
|
||||||
|
executorType: "docker",
|
||||||
|
assetIds: ["asset-123"],
|
||||||
|
},
|
||||||
|
context: {
|
||||||
|
assetIds: ["asset-123"],
|
||||||
|
assets: [
|
||||||
|
{
|
||||||
|
_id: "asset-123",
|
||||||
|
displayName: "Sample Asset",
|
||||||
|
sourcePath: "/data/sample-asset",
|
||||||
|
},
|
||||||
|
],
|
||||||
|
upstreamResults: [upstreamResult],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
output: {
|
||||||
|
result,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
74
packages/contracts/src/workflow-input.ts
Normal file
74
packages/contracts/src/workflow-input.ts
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
/** Kinds of inputs a workflow can bind to: an individual asset or a dataset. */
export const WORKFLOW_INPUT_BINDING_KINDS = ["asset", "dataset"] as const;
export type WorkflowInputBindingKind = (typeof WORKFLOW_INPUT_BINDING_KINDS)[number];

/** A single workflow input: the kind of source plus its identifier. */
export type WorkflowInputBinding = {
  kind: WorkflowInputBindingKind;
  id: string;
};
|
||||||
|
|
||||||
|
export function normalizeWorkflowInputBindings(input: {
|
||||||
|
inputBindings?: WorkflowInputBinding[];
|
||||||
|
assetIds?: string[];
|
||||||
|
datasetIds?: string[];
|
||||||
|
}): WorkflowInputBinding[] {
|
||||||
|
const explicitBindings = (input.inputBindings ?? []).filter(
|
||||||
|
(binding): binding is WorkflowInputBinding =>
|
||||||
|
(binding?.kind === "asset" || binding?.kind === "dataset") &&
|
||||||
|
typeof binding.id === "string" &&
|
||||||
|
binding.id.trim().length > 0,
|
||||||
|
);
|
||||||
|
if (explicitBindings.length > 0) {
|
||||||
|
return dedupeWorkflowInputBindings(explicitBindings);
|
||||||
|
}
|
||||||
|
|
||||||
|
return dedupeWorkflowInputBindings([
|
||||||
|
...(input.assetIds ?? []).map((id) => ({ kind: "asset" as const, id })),
|
||||||
|
...(input.datasetIds ?? []).map((id) => ({ kind: "dataset" as const, id })),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function dedupeWorkflowInputBindings(
|
||||||
|
inputBindings: WorkflowInputBinding[],
|
||||||
|
): WorkflowInputBinding[] {
|
||||||
|
const seen = new Set<string>();
|
||||||
|
const result: WorkflowInputBinding[] = [];
|
||||||
|
|
||||||
|
for (const binding of inputBindings) {
|
||||||
|
const normalizedId = binding.id.trim();
|
||||||
|
if (!normalizedId) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const key = `${binding.kind}:${normalizedId}`;
|
||||||
|
if (seen.has(key)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
seen.add(key);
|
||||||
|
result.push({
|
||||||
|
kind: binding.kind,
|
||||||
|
id: normalizedId,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function splitWorkflowInputBindings(inputBindings: WorkflowInputBinding[]): {
|
||||||
|
assetIds: string[];
|
||||||
|
datasetIds: string[];
|
||||||
|
} {
|
||||||
|
const assetIds: string[] = [];
|
||||||
|
const datasetIds: string[] = [];
|
||||||
|
|
||||||
|
for (const binding of inputBindings) {
|
||||||
|
if (binding.kind === "asset") {
|
||||||
|
assetIds.push(binding.id);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
datasetIds.push(binding.id);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
assetIds,
|
||||||
|
datasetIds,
|
||||||
|
};
|
||||||
|
}
|
||||||
Loading…
x
Reference in New Issue
Block a user