✨ feat: add project dataset and workflow template flows
This commit is contained in:
parent
6ee54c8399
commit
71c5fd5995
14
README.md
14
README.md
@ -2,6 +2,16 @@
|
||||
|
||||
EmboFlow is a B/S embodied-data workflow platform for raw asset ingestion, delivery normalization, dataset transformation, workflow execution, preview, and export.
|
||||
|
||||
## Current V1 Features
|
||||
|
||||
- Project-scoped workspace shell with a dedicated Projects page and active project selector in the header
|
||||
- Asset workspace that supports local asset registration, probe summaries, storage connection management, and dataset creation
|
||||
- Workflow templates as first-class objects, including default project templates and creating project workflows from a template
|
||||
- Blank workflow creation and a large React Flow editor with drag-and-drop nodes, free canvas movement, edge validation, node runtime presets, and Python code-hook injection
|
||||
- Workflow-level `Save As Template` so edited graphs can be promoted into reusable project templates
|
||||
- Mongo-backed run orchestration, worker execution, run history, task detail, logs, stdout/stderr, artifacts, cancel, retry, and task retry
|
||||
- Runtime shell level Chinese and English switching
|
||||
|
||||
## Bootstrap
|
||||
|
||||
From the repository root:
|
||||
@ -68,7 +78,11 @@ You can register that directory from the Assets page or via `POST /api/assets/re
|
||||
The workflow editor currently requires selecting at least one registered asset before a run can be created.
|
||||
The editor now also persists per-node runtime config in workflow versions, including executor overrides, optional artifact title overrides, and Python code-hook source for inspect and transform style nodes.
|
||||
The runtime web shell now exposes a visible `中文 / English` language toggle. The core workspace shell and workflow authoring surface are translated through a lightweight i18n layer.
|
||||
The shell now also exposes a dedicated Projects page plus an active project selector, so assets, datasets, workflow templates, workflows, and runs all switch together at the project boundary.
|
||||
The Assets workspace now includes first-class storage connections and datasets. A dataset is distinct from a raw asset and binds project source assets to a selected local or object-storage-backed destination.
|
||||
The Workflows workspace now includes a template gallery. Projects can start from default or saved templates, or create a blank workflow directly.
|
||||
The workflow editor center panel now uses a real draggable node canvas with zoom, pan, mini-map, dotted background, handle-based edge creation, persisted node positions, and localized validation feedback instead of a static list of node cards.
|
||||
The workflow editor right panel now also supports saving the current workflow draft as a reusable workflow template, in addition to editing per-node runtime settings and Python hooks.
|
||||
The node library now supports both click-to-append and drag-and-drop placement into the canvas. V1 connection rules block self-edges, duplicate edges, cycles, incoming edges into source nodes, outgoing edges from export nodes, and multiple upstream edges into a single node.
|
||||
The Runs workspace now shows project-scoped run history, run-level aggregated summaries, cancel/retry controls, and run detail views with persisted task summaries, stdout/stderr sections, result previews, and artifact links into Explore.
|
||||
Selected run tasks now expose the frozen node definition id, executor config snapshot, and code-hook metadata that were captured when the run was created.
|
||||
|
||||
@ -68,6 +68,62 @@ type AssetProbeReportDocument = {
|
||||
createdAt: string;
|
||||
};
|
||||
|
||||
type StorageProvider = "local" | "minio" | "s3" | "bos" | "oss";
|
||||
|
||||
type StorageConnectionDocument = Timestamped & {
|
||||
_id: string;
|
||||
workspaceId: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
provider: StorageProvider;
|
||||
bucket?: string;
|
||||
endpoint?: string;
|
||||
region?: string;
|
||||
basePath?: string;
|
||||
rootPath?: string;
|
||||
status: "active";
|
||||
createdBy: string;
|
||||
};
|
||||
|
||||
type DatasetDocument = Timestamped & {
|
||||
_id: string;
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
description: string;
|
||||
status: "draft" | "active";
|
||||
sourceAssetIds: string[];
|
||||
storageConnectionId: string;
|
||||
storagePath: string;
|
||||
latestVersionId: string;
|
||||
latestVersionNumber: number;
|
||||
createdBy: string;
|
||||
summary: Record<string, unknown>;
|
||||
};
|
||||
|
||||
type DatasetVersionDocument = {
|
||||
_id: string;
|
||||
datasetId: string;
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
versionNumber: number;
|
||||
sourceAssetIds: string[];
|
||||
storageSnapshot: {
|
||||
storageConnectionId: string;
|
||||
provider: StorageProvider;
|
||||
bucket?: string;
|
||||
endpoint?: string;
|
||||
region?: string;
|
||||
basePath?: string;
|
||||
rootPath?: string;
|
||||
storagePath: string;
|
||||
};
|
||||
summary: Record<string, unknown>;
|
||||
createdBy: string;
|
||||
createdAt: string;
|
||||
};
|
||||
|
||||
type WorkflowDefinitionDocument = Timestamped & {
|
||||
_id: string;
|
||||
workspaceId: string;
|
||||
@ -149,6 +205,24 @@ type ArtifactDocument = Timestamped & {
|
||||
payload: Record<string, unknown>;
|
||||
};
|
||||
|
||||
type WorkflowTemplateDocument = Timestamped & {
|
||||
_id: string;
|
||||
workspaceId: string;
|
||||
projectId?: string;
|
||||
name: string;
|
||||
slug: string;
|
||||
description: string;
|
||||
status: "active";
|
||||
visualGraph: Record<string, unknown>;
|
||||
logicGraph: {
|
||||
nodes: Array<{ id: string; type: string }>;
|
||||
edges: Array<{ from: string; to: string }>;
|
||||
};
|
||||
runtimeGraph: Record<string, unknown>;
|
||||
pluginRefs: string[];
|
||||
createdBy: string;
|
||||
};
|
||||
|
||||
type WorkflowRuntimeGraph = Record<string, unknown> & {
|
||||
selectedPreset?: string;
|
||||
nodeBindings?: Record<string, string>;
|
||||
@ -320,9 +394,105 @@ function collectRetryNodeIds(tasks: RunTaskDocument[], rootNodeId: string) {
|
||||
return collected;
|
||||
}
|
||||
|
||||
function createStorageSnapshot(
|
||||
connection: StorageConnectionDocument,
|
||||
storagePath: string,
|
||||
): DatasetVersionDocument["storageSnapshot"] {
|
||||
return {
|
||||
storageConnectionId: connection._id,
|
||||
provider: connection.provider,
|
||||
bucket: connection.bucket,
|
||||
endpoint: connection.endpoint,
|
||||
region: connection.region,
|
||||
basePath: connection.basePath,
|
||||
rootPath: connection.rootPath,
|
||||
storagePath,
|
||||
};
|
||||
}
|
||||
|
||||
export class MongoAppStore {
|
||||
constructor(private readonly db: Db) {}
|
||||
|
||||
private async ensureDefaultStorageConnection(workspaceId: string, createdBy: string) {
|
||||
const collection = this.db.collection<StorageConnectionDocument>("storage_connections");
|
||||
const existing = await collection.findOne({
|
||||
workspaceId,
|
||||
slug: "local-workspace-storage",
|
||||
});
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
||||
const connection: StorageConnectionDocument = {
|
||||
_id: `storage-${randomUUID()}`,
|
||||
workspaceId,
|
||||
name: "Local Workspace Storage",
|
||||
slug: "local-workspace-storage",
|
||||
provider: "local",
|
||||
rootPath: "/Users/longtaowu/workspace/emboldata",
|
||||
status: "active",
|
||||
createdBy,
|
||||
createdAt: nowIso(),
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
await collection.insertOne(connection);
|
||||
return connection;
|
||||
}
|
||||
|
||||
private async ensureDefaultWorkflowTemplate(
|
||||
workspaceId: string,
|
||||
projectId: string,
|
||||
createdBy: string,
|
||||
) {
|
||||
const collection = this.db.collection<WorkflowTemplateDocument>("workflow_templates");
|
||||
const existing = await collection.findOne({
|
||||
workspaceId,
|
||||
projectId,
|
||||
slug: "delivery-normalization-template",
|
||||
});
|
||||
if (existing) {
|
||||
return existing;
|
||||
}
|
||||
|
||||
const template: WorkflowTemplateDocument = {
|
||||
_id: `template-${randomUUID()}`,
|
||||
workspaceId,
|
||||
projectId,
|
||||
name: "Delivery Normalization Template",
|
||||
slug: "delivery-normalization-template",
|
||||
description: "Default delivery normalization pipeline with source, validation, and export steps.",
|
||||
status: "active",
|
||||
visualGraph: {
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
nodePositions: {
|
||||
"source-asset": { x: 120, y: 120 },
|
||||
"validate-structure": { x: 440, y: 240 },
|
||||
"export-delivery-package": { x: 780, y: 360 },
|
||||
},
|
||||
},
|
||||
logicGraph: {
|
||||
nodes: [
|
||||
{ id: "source-asset", type: "source" },
|
||||
{ id: "validate-structure", type: "inspect" },
|
||||
{ id: "export-delivery-package", type: "export" },
|
||||
],
|
||||
edges: [
|
||||
{ from: "source-asset", to: "validate-structure" },
|
||||
{ from: "validate-structure", to: "export-delivery-package" },
|
||||
],
|
||||
},
|
||||
runtimeGraph: {
|
||||
selectedPreset: "delivery-normalization",
|
||||
},
|
||||
pluginRefs: ["builtin:delivery-nodes"],
|
||||
createdBy,
|
||||
createdAt: nowIso(),
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
await collection.insertOne(template);
|
||||
return template;
|
||||
}
|
||||
|
||||
async bootstrapDevContext(input: {
|
||||
userId?: string;
|
||||
workspaceName?: string;
|
||||
@ -387,6 +557,9 @@ export class MongoAppStore {
|
||||
throw new Error("failed to bootstrap project");
|
||||
}
|
||||
|
||||
await this.ensureDefaultStorageConnection(workspace._id, userId);
|
||||
await this.ensureDefaultWorkflowTemplate(workspace._id, project._id, userId);
|
||||
|
||||
return {
|
||||
userId,
|
||||
workspace: mapDoc(workspace as WithId<WorkspaceDocument>),
|
||||
@ -420,6 +593,7 @@ export class MongoAppStore {
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
await this.db.collection("projects").insertOne(project);
|
||||
await this.ensureDefaultWorkflowTemplate(project.workspaceId, project._id, input.createdBy);
|
||||
return project;
|
||||
}
|
||||
|
||||
@ -431,6 +605,51 @@ export class MongoAppStore {
|
||||
.toArray();
|
||||
}
|
||||
|
||||
async createStorageConnection(input: {
|
||||
workspaceId: string;
|
||||
name: string;
|
||||
provider: StorageProvider;
|
||||
bucket?: string;
|
||||
endpoint?: string;
|
||||
region?: string;
|
||||
basePath?: string;
|
||||
rootPath?: string;
|
||||
createdBy: string;
|
||||
}) {
|
||||
const connection: StorageConnectionDocument = {
|
||||
_id: `storage-${randomUUID()}`,
|
||||
workspaceId: input.workspaceId,
|
||||
name: input.name,
|
||||
slug: slugify(input.name),
|
||||
provider: input.provider,
|
||||
bucket: input.bucket,
|
||||
endpoint: input.endpoint,
|
||||
region: input.region,
|
||||
basePath: input.basePath,
|
||||
rootPath: input.rootPath,
|
||||
status: "active",
|
||||
createdBy: input.createdBy,
|
||||
createdAt: nowIso(),
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
await this.db.collection("storage_connections").insertOne(connection);
|
||||
return connection;
|
||||
}
|
||||
|
||||
async listStorageConnections(workspaceId: string) {
|
||||
return this.db
|
||||
.collection<StorageConnectionDocument>("storage_connections")
|
||||
.find({ workspaceId, status: "active" })
|
||||
.sort({ createdAt: 1 })
|
||||
.toArray();
|
||||
}
|
||||
|
||||
async getStorageConnection(storageConnectionId: string) {
|
||||
return this.db
|
||||
.collection<StorageConnectionDocument>("storage_connections")
|
||||
.findOne({ _id: storageConnectionId });
|
||||
}
|
||||
|
||||
async registerAsset(input: {
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
@ -532,6 +751,158 @@ export class MongoAppStore {
|
||||
.next();
|
||||
}
|
||||
|
||||
async createDataset(input: {
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
sourceAssetIds: string[];
|
||||
storageConnectionId: string;
|
||||
storagePath: string;
|
||||
createdBy: string;
|
||||
}) {
|
||||
const assetIds = Array.from(new Set((input.sourceAssetIds ?? []).filter(Boolean)));
|
||||
if (assetIds.length === 0) {
|
||||
throw new Error("sourceAssetIds must include at least one asset");
|
||||
}
|
||||
|
||||
const assets = await this.db
|
||||
.collection<AssetDocument>("assets")
|
||||
.find({ _id: { $in: assetIds } })
|
||||
.toArray();
|
||||
if (assets.length !== assetIds.length) {
|
||||
throw new Error("one or more source assets do not exist");
|
||||
}
|
||||
if (assets.some((asset) => asset.projectId !== input.projectId)) {
|
||||
throw new Error("source assets must belong to the dataset project");
|
||||
}
|
||||
|
||||
const storageConnection = await this.getStorageConnection(input.storageConnectionId);
|
||||
if (!storageConnection) {
|
||||
throw new Error(`storage connection not found: ${input.storageConnectionId}`);
|
||||
}
|
||||
if (storageConnection.workspaceId !== input.workspaceId) {
|
||||
throw new Error("storage connection must belong to the dataset workspace");
|
||||
}
|
||||
|
||||
const createdAt = nowIso();
|
||||
const datasetId = `dataset-${randomUUID()}`;
|
||||
const versionId = `${datasetId}-v1`;
|
||||
const summary = {
|
||||
sourceAssetCount: assetIds.length,
|
||||
storageProvider: storageConnection.provider,
|
||||
storagePath: input.storagePath,
|
||||
};
|
||||
|
||||
const dataset: DatasetDocument = {
|
||||
_id: datasetId,
|
||||
workspaceId: input.workspaceId,
|
||||
projectId: input.projectId,
|
||||
name: input.name,
|
||||
slug: slugify(input.name),
|
||||
description: input.description ?? "",
|
||||
status: "active",
|
||||
sourceAssetIds: assetIds,
|
||||
storageConnectionId: storageConnection._id,
|
||||
storagePath: input.storagePath,
|
||||
latestVersionId: versionId,
|
||||
latestVersionNumber: 1,
|
||||
createdBy: input.createdBy,
|
||||
createdAt,
|
||||
updatedAt: createdAt,
|
||||
summary,
|
||||
};
|
||||
const version: DatasetVersionDocument = {
|
||||
_id: versionId,
|
||||
datasetId,
|
||||
workspaceId: input.workspaceId,
|
||||
projectId: input.projectId,
|
||||
versionNumber: 1,
|
||||
sourceAssetIds: assetIds,
|
||||
storageSnapshot: createStorageSnapshot(storageConnection, input.storagePath),
|
||||
summary,
|
||||
createdBy: input.createdBy,
|
||||
createdAt,
|
||||
};
|
||||
|
||||
await this.db.collection("datasets").insertOne(dataset);
|
||||
await this.db.collection("dataset_versions").insertOne(version);
|
||||
return dataset;
|
||||
}
|
||||
|
||||
async listDatasets(projectId: string) {
|
||||
return this.db
|
||||
.collection<DatasetDocument>("datasets")
|
||||
.find({ projectId })
|
||||
.sort({ createdAt: -1 })
|
||||
.toArray();
|
||||
}
|
||||
|
||||
async getDataset(datasetId: string) {
|
||||
return this.db.collection<DatasetDocument>("datasets").findOne({ _id: datasetId });
|
||||
}
|
||||
|
||||
async listDatasetVersions(datasetId: string) {
|
||||
return this.db
|
||||
.collection<DatasetVersionDocument>("dataset_versions")
|
||||
.find({ datasetId })
|
||||
.sort({ versionNumber: -1 })
|
||||
.toArray();
|
||||
}
|
||||
|
||||
async createWorkflowTemplate(input: {
|
||||
workspaceId: string;
|
||||
projectId?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
visualGraph: Record<string, unknown>;
|
||||
logicGraph: WorkflowTemplateDocument["logicGraph"];
|
||||
runtimeGraph: Record<string, unknown>;
|
||||
pluginRefs: string[];
|
||||
createdBy: string;
|
||||
}) {
|
||||
const template: WorkflowTemplateDocument = {
|
||||
_id: `template-${randomUUID()}`,
|
||||
workspaceId: input.workspaceId,
|
||||
projectId: input.projectId,
|
||||
name: input.name,
|
||||
slug: slugify(input.name),
|
||||
description: input.description ?? "",
|
||||
status: "active",
|
||||
visualGraph: input.visualGraph,
|
||||
logicGraph: input.logicGraph,
|
||||
runtimeGraph: input.runtimeGraph,
|
||||
pluginRefs: input.pluginRefs,
|
||||
createdBy: input.createdBy,
|
||||
createdAt: nowIso(),
|
||||
updatedAt: nowIso(),
|
||||
};
|
||||
await this.db.collection("workflow_templates").insertOne(template);
|
||||
return template;
|
||||
}
|
||||
|
||||
async listWorkflowTemplates(input: { workspaceId: string; projectId?: string }) {
|
||||
const filter: Record<string, unknown> = {
|
||||
workspaceId: input.workspaceId,
|
||||
status: "active",
|
||||
};
|
||||
if (input.projectId) {
|
||||
filter.$or = [{ projectId: input.projectId }, { projectId: { $exists: false } }];
|
||||
}
|
||||
|
||||
return this.db
|
||||
.collection<WorkflowTemplateDocument>("workflow_templates")
|
||||
.find(filter)
|
||||
.sort({ createdAt: -1 })
|
||||
.toArray();
|
||||
}
|
||||
|
||||
async getWorkflowTemplate(templateId: string) {
|
||||
return this.db
|
||||
.collection<WorkflowTemplateDocument>("workflow_templates")
|
||||
.findOne({ _id: templateId });
|
||||
}
|
||||
|
||||
async createWorkflowDefinition(input: {
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
@ -555,6 +926,43 @@ export class MongoAppStore {
|
||||
return definition;
|
||||
}
|
||||
|
||||
async createWorkflowFromTemplate(input: {
|
||||
templateId: string;
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
name: string;
|
||||
createdBy: string;
|
||||
}) {
|
||||
const template = await this.getWorkflowTemplate(input.templateId);
|
||||
if (!template) {
|
||||
throw new Error(`workflow template not found: ${input.templateId}`);
|
||||
}
|
||||
if (template.workspaceId !== input.workspaceId) {
|
||||
throw new Error("workflow template must belong to the target workspace");
|
||||
}
|
||||
if (template.projectId && template.projectId !== input.projectId) {
|
||||
throw new Error("workflow template must belong to the target project or be workspace-scoped");
|
||||
}
|
||||
|
||||
const definition = await this.createWorkflowDefinition({
|
||||
workspaceId: input.workspaceId,
|
||||
projectId: input.projectId,
|
||||
name: input.name,
|
||||
createdBy: input.createdBy,
|
||||
});
|
||||
|
||||
await this.saveWorkflowVersion({
|
||||
workflowDefinitionId: definition._id,
|
||||
visualGraph: template.visualGraph,
|
||||
logicGraph: template.logicGraph,
|
||||
runtimeGraph: template.runtimeGraph,
|
||||
pluginRefs: template.pluginRefs,
|
||||
createdBy: input.createdBy,
|
||||
});
|
||||
|
||||
return this.getWorkflowDefinition(definition._id);
|
||||
}
|
||||
|
||||
async listWorkflowDefinitions(projectId: string) {
|
||||
return this.db
|
||||
.collection<WorkflowDefinitionDocument>("workflow_definitions")
|
||||
|
||||
@ -94,6 +94,34 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/storage-connections", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
await store.createStorageConnection({
|
||||
workspaceId: request.body.workspaceId,
|
||||
name: request.body.name,
|
||||
provider: request.body.provider,
|
||||
bucket: request.body.bucket,
|
||||
endpoint: request.body.endpoint,
|
||||
region: request.body.region,
|
||||
basePath: request.body.basePath,
|
||||
rootPath: request.body.rootPath,
|
||||
createdBy: request.body.createdBy ?? "local-user",
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/storage-connections", async (request, response, next) => {
|
||||
try {
|
||||
response.json(await store.listStorageConnections(String(request.query.workspaceId)));
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/assets/register", async (request, response, next) => {
|
||||
try {
|
||||
const sourcePath = request.body.sourcePath as string | undefined;
|
||||
@ -158,10 +186,120 @@ export async function createApiRuntime(config = resolveApiRuntimeConfig()) {
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/datasets", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
await store.createDataset({
|
||||
workspaceId: request.body.workspaceId,
|
||||
projectId: request.body.projectId,
|
||||
name: request.body.name,
|
||||
description: request.body.description,
|
||||
sourceAssetIds: request.body.sourceAssetIds ?? [],
|
||||
storageConnectionId: request.body.storageConnectionId,
|
||||
storagePath: request.body.storagePath,
|
||||
createdBy: request.body.createdBy ?? "local-user",
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/datasets", async (request, response, next) => {
|
||||
try {
|
||||
response.json(await store.listDatasets(String(request.query.projectId)));
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/datasets/:datasetId", async (request, response, next) => {
|
||||
try {
|
||||
const dataset = await store.getDataset(request.params.datasetId);
|
||||
if (!dataset) {
|
||||
response.status(404).json({ message: "dataset not found" });
|
||||
return;
|
||||
}
|
||||
response.json(dataset);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/datasets/:datasetId/versions", async (request, response, next) => {
|
||||
try {
|
||||
response.json(await store.listDatasetVersions(request.params.datasetId));
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/node-definitions", (_request, response) => {
|
||||
response.json(store.listNodeDefinitions());
|
||||
});
|
||||
|
||||
app.post("/api/workflow-templates", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
await store.createWorkflowTemplate({
|
||||
workspaceId: request.body.workspaceId,
|
||||
projectId: request.body.projectId,
|
||||
name: request.body.name,
|
||||
description: request.body.description,
|
||||
visualGraph: request.body.visualGraph ?? {},
|
||||
logicGraph: request.body.logicGraph,
|
||||
runtimeGraph: request.body.runtimeGraph ?? {},
|
||||
pluginRefs: request.body.pluginRefs ?? [],
|
||||
createdBy: request.body.createdBy ?? "local-user",
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/workflow-templates", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
await store.listWorkflowTemplates({
|
||||
workspaceId: String(request.query.workspaceId),
|
||||
projectId: request.query.projectId ? String(request.query.projectId) : undefined,
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.get("/api/workflow-templates/:templateId", async (request, response, next) => {
|
||||
try {
|
||||
const template = await store.getWorkflowTemplate(request.params.templateId);
|
||||
if (!template) {
|
||||
response.status(404).json({ message: "workflow template not found" });
|
||||
return;
|
||||
}
|
||||
response.json(template);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/workflow-templates/:templateId/workflows", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
await store.createWorkflowFromTemplate({
|
||||
templateId: request.params.templateId,
|
||||
workspaceId: request.body.workspaceId,
|
||||
projectId: request.body.projectId,
|
||||
name: request.body.name,
|
||||
createdBy: request.body.createdBy ?? "local-user",
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
});
|
||||
|
||||
app.post("/api/workflows", async (request, response, next) => {
|
||||
try {
|
||||
response.json(
|
||||
|
||||
@ -96,6 +96,67 @@ test("mongo-backed runtime reuses bootstrapped workspace and project across rest
|
||||
assert.equal(projects[0]?._id, bootstrap.project._id);
|
||||
});
|
||||
|
||||
test("mongo-backed runtime provisions a default workflow template for newly created projects", async (t) => {
|
||||
const mongod = await MongoMemoryServer.create({
|
||||
instance: {
|
||||
ip: "127.0.0.1",
|
||||
port: 27217,
|
||||
},
|
||||
});
|
||||
t.after(async () => {
|
||||
await mongod.stop();
|
||||
});
|
||||
|
||||
const server = await startRuntimeServer({
|
||||
host: "127.0.0.1",
|
||||
port: 0,
|
||||
mongoUri: mongod.getUri(),
|
||||
database: "emboflow-runtime-project-template",
|
||||
corsOrigin: "http://127.0.0.1:3000",
|
||||
});
|
||||
t.after(async () => {
|
||||
await server.close();
|
||||
});
|
||||
|
||||
const bootstrap = await readJson<{
|
||||
workspace: { _id: string };
|
||||
}>(
|
||||
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({ userId: "project-template-user", projectName: "Seed Project" }),
|
||||
}),
|
||||
);
|
||||
|
||||
const project = await readJson<{ _id: string }>(
|
||||
await fetch(`${server.baseUrl}/api/projects`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
name: "Second Project",
|
||||
description: "Project created after bootstrap",
|
||||
createdBy: "project-template-user",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const templates = await readJson<Array<{ slug: string; projectId?: string }>>(
|
||||
await fetch(
|
||||
`${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(project._id)}`,
|
||||
),
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
templates.some((template) => template.projectId === project._id),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
templates.some((template) => template.slug === "delivery-normalization-template"),
|
||||
true,
|
||||
);
|
||||
});
|
||||
|
||||
test("mongo-backed runtime persists probed assets and workflow runs through the HTTP API", async (t) => {
|
||||
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-"));
|
||||
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||
@ -826,6 +887,191 @@ test("mongo-backed runtime exposes persisted task execution summaries and logs",
|
||||
});
|
||||
});
|
||||
|
||||
test("mongo-backed runtime supports storage connections, datasets, workflow templates, and workflow creation from templates", async (t) => {
|
||||
const sourceDir = await mkdtemp(path.join(os.tmpdir(), "emboflow-runtime-datasets-"));
|
||||
await mkdir(path.join(sourceDir, "DJI_001"));
|
||||
await writeFile(path.join(sourceDir, "meta.json"), "{}");
|
||||
await writeFile(path.join(sourceDir, "intrinsics.json"), "{}");
|
||||
await writeFile(path.join(sourceDir, "video_meta.json"), "{}");
|
||||
await writeFile(path.join(sourceDir, "DJI_001", "DJI_001.mp4"), "");
|
||||
|
||||
const mongod = await MongoMemoryServer.create({
|
||||
instance: {
|
||||
ip: "127.0.0.1",
|
||||
port: 27125,
|
||||
},
|
||||
});
|
||||
t.after(async () => {
|
||||
await mongod.stop();
|
||||
});
|
||||
|
||||
const server = await startRuntimeServer({
|
||||
host: "127.0.0.1",
|
||||
port: 0,
|
||||
mongoUri: mongod.getUri(),
|
||||
database: "emboflow-runtime-datasets-templates",
|
||||
corsOrigin: "http://127.0.0.1:3000",
|
||||
});
|
||||
t.after(async () => {
|
||||
await server.close();
|
||||
});
|
||||
|
||||
const bootstrap = await readJson<{
|
||||
workspace: { _id: string };
|
||||
project: { _id: string };
|
||||
}>(
|
||||
await fetch(`${server.baseUrl}/api/dev/bootstrap`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({ userId: "dataset-user", projectName: "Dataset Project" }),
|
||||
}),
|
||||
);
|
||||
|
||||
const connections = await readJson<Array<{ _id: string; provider: string; name: string }>>(
|
||||
await fetch(
|
||||
`${server.baseUrl}/api/storage-connections?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}`,
|
||||
),
|
||||
);
|
||||
|
||||
const cloudConnection = await readJson<{ _id: string; provider: string; bucket: string }>(
|
||||
await fetch(`${server.baseUrl}/api/storage-connections`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
name: "Project OSS",
|
||||
provider: "oss",
|
||||
bucket: "emboflow-datasets",
|
||||
endpoint: "oss-cn-hangzhou.aliyuncs.com",
|
||||
basePath: "datasets/project-a",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const asset = await readJson<{ _id: string; displayName: string }>(
|
||||
await fetch(`${server.baseUrl}/api/assets/register`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
projectId: bootstrap.project._id,
|
||||
sourcePath: sourceDir,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
await readJson(await fetch(`${server.baseUrl}/api/assets/${asset._id}/probe`, { method: "POST" }));
|
||||
|
||||
const dataset = await readJson<{
|
||||
_id: string;
|
||||
latestVersionNumber: number;
|
||||
storageConnectionId: string;
|
||||
}>(
|
||||
await fetch(`${server.baseUrl}/api/datasets`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
projectId: bootstrap.project._id,
|
||||
name: "Delivery Dataset",
|
||||
description: "Dataset derived from the probed delivery asset",
|
||||
sourceAssetIds: [asset._id],
|
||||
storageConnectionId: cloudConnection._id,
|
||||
storagePath: "delivery/dataset-v1",
|
||||
createdBy: "dataset-user",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const datasets = await readJson<Array<{ _id: string; latestVersionNumber: number }>>(
|
||||
await fetch(`${server.baseUrl}/api/datasets?projectId=${encodeURIComponent(bootstrap.project._id)}`),
|
||||
);
|
||||
const datasetVersions = await readJson<Array<{ datasetId: string; versionNumber: number }>>(
|
||||
await fetch(`${server.baseUrl}/api/datasets/${dataset._id}/versions`),
|
||||
);
|
||||
|
||||
const template = await readJson<{ _id: string; name: string }>(
|
||||
await fetch(`${server.baseUrl}/api/workflow-templates`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
projectId: bootstrap.project._id,
|
||||
name: "Delivery Review Template",
|
||||
description: "Template with inspect and export nodes",
|
||||
visualGraph: {
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
nodePositions: {
|
||||
"source-asset": { x: 120, y: 120 },
|
||||
"validate-structure": { x: 460, y: 220 },
|
||||
"export-delivery-package": { x: 820, y: 340 },
|
||||
},
|
||||
},
|
||||
logicGraph: {
|
||||
nodes: [
|
||||
{ id: "source-asset", type: "source" },
|
||||
{ id: "validate-structure", type: "inspect" },
|
||||
{ id: "export-delivery-package", type: "export" },
|
||||
],
|
||||
edges: [
|
||||
{ from: "source-asset", to: "validate-structure" },
|
||||
{ from: "validate-structure", to: "export-delivery-package" },
|
||||
],
|
||||
},
|
||||
runtimeGraph: {
|
||||
selectedPreset: "delivery-template",
|
||||
nodeConfigs: {
|
||||
"validate-structure": {
|
||||
executorType: "python",
|
||||
},
|
||||
},
|
||||
},
|
||||
pluginRefs: ["builtin:delivery-nodes"],
|
||||
createdBy: "dataset-user",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const templates = await readJson<Array<{ _id: string; name: string }>>(
|
||||
await fetch(
|
||||
`${server.baseUrl}/api/workflow-templates?workspaceId=${encodeURIComponent(bootstrap.workspace._id)}&projectId=${encodeURIComponent(bootstrap.project._id)}`,
|
||||
),
|
||||
);
|
||||
|
||||
const workflowFromTemplate = await readJson<{ _id: string; name: string; latestVersionNumber: number }>(
|
||||
await fetch(`${server.baseUrl}/api/workflow-templates/${template._id}/workflows`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: bootstrap.workspace._id,
|
||||
projectId: bootstrap.project._id,
|
||||
name: "Delivery Review Flow",
|
||||
createdBy: "dataset-user",
|
||||
}),
|
||||
}),
|
||||
);
|
||||
|
||||
const workflowVersions = await readJson<Array<{ versionNumber: number; runtimeGraph?: { selectedPreset?: string } }>>(
|
||||
await fetch(`${server.baseUrl}/api/workflows/${workflowFromTemplate._id}/versions`),
|
||||
);
|
||||
|
||||
assert.equal(connections[0]?.provider, "local");
|
||||
assert.equal(cloudConnection.provider, "oss");
|
||||
assert.equal(cloudConnection.bucket, "emboflow-datasets");
|
||||
assert.equal(dataset.storageConnectionId, cloudConnection._id);
|
||||
assert.equal(dataset.latestVersionNumber, 1);
|
||||
assert.equal(datasets.length, 1);
|
||||
assert.equal(datasets[0]?._id, dataset._id);
|
||||
assert.equal(datasetVersions.length, 1);
|
||||
assert.equal(datasetVersions[0]?.datasetId, dataset._id);
|
||||
assert.equal(datasetVersions[0]?.versionNumber, 1);
|
||||
assert.equal(template.name, "Delivery Review Template");
|
||||
assert.equal(templates.some((item) => item._id === template._id), true);
|
||||
assert.equal(workflowFromTemplate.latestVersionNumber, 1);
|
||||
assert.equal(workflowVersions.length, 1);
|
||||
assert.equal(workflowVersions[0]?.versionNumber, 1);
|
||||
assert.equal(workflowVersions[0]?.runtimeGraph?.selectedPreset, "delivery-template");
|
||||
});
|
||||
|
||||
test("mongo-backed runtime can cancel a run, retry a run snapshot, and retry a failed task", async (t) => {
|
||||
const mongod = await MongoMemoryServer.create({
|
||||
instance: {
|
||||
|
||||
@ -24,12 +24,61 @@ export class ApiClient {
|
||||
return readJson<BootstrapContext>(response);
|
||||
}
|
||||
|
||||
async listProjects(workspaceId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/projects?workspaceId=${encodeURIComponent(workspaceId)}`),
|
||||
);
|
||||
}
|
||||
|
||||
async createProject(input: {
|
||||
workspaceId: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
createdBy?: string;
|
||||
}) {
|
||||
return readJson<any>(
|
||||
await fetch(`${this.baseUrl}/api/projects`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify(input),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async listAssets(projectId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/assets?projectId=${encodeURIComponent(projectId)}`),
|
||||
);
|
||||
}
|
||||
|
||||
async listStorageConnections(workspaceId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(
|
||||
`${this.baseUrl}/api/storage-connections?workspaceId=${encodeURIComponent(workspaceId)}`,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
async createStorageConnection(input: {
|
||||
workspaceId: string;
|
||||
name: string;
|
||||
provider: "local" | "minio" | "s3" | "bos" | "oss";
|
||||
bucket?: string;
|
||||
endpoint?: string;
|
||||
region?: string;
|
||||
basePath?: string;
|
||||
rootPath?: string;
|
||||
createdBy?: string;
|
||||
}) {
|
||||
return readJson<any>(
|
||||
await fetch(`${this.baseUrl}/api/storage-connections`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify(input),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async registerLocalAsset(input: {
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
@ -61,6 +110,41 @@ export class ApiClient {
|
||||
);
|
||||
}
|
||||
|
||||
async listDatasets(projectId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/datasets?projectId=${encodeURIComponent(projectId)}`),
|
||||
);
|
||||
}
|
||||
|
||||
async createDataset(input: {
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
sourceAssetIds: string[];
|
||||
storageConnectionId: string;
|
||||
storagePath: string;
|
||||
createdBy?: string;
|
||||
}) {
|
||||
return readJson<any>(
|
||||
await fetch(`${this.baseUrl}/api/datasets`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify(input),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async getDataset(datasetId: string) {
|
||||
return readJson<any>(await fetch(`${this.baseUrl}/api/datasets/${datasetId}`));
|
||||
}
|
||||
|
||||
async listDatasetVersions(datasetId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/datasets/${datasetId}/versions`),
|
||||
);
|
||||
}
|
||||
|
||||
async listWorkflows(projectId: string) {
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/workflows?projectId=${encodeURIComponent(projectId)}`),
|
||||
@ -107,6 +191,60 @@ export class ApiClient {
|
||||
return readJson<any[]>(await fetch(`${this.baseUrl}/api/node-definitions`));
|
||||
}
|
||||
|
||||
async listWorkflowTemplates(input: {
|
||||
workspaceId: string;
|
||||
projectId?: string;
|
||||
}) {
|
||||
const search = new URLSearchParams({ workspaceId: input.workspaceId });
|
||||
if (input.projectId) {
|
||||
search.set("projectId", input.projectId);
|
||||
}
|
||||
return readJson<any[]>(
|
||||
await fetch(`${this.baseUrl}/api/workflow-templates?${search.toString()}`),
|
||||
);
|
||||
}
|
||||
|
||||
async createWorkflowTemplate(input: {
|
||||
workspaceId: string;
|
||||
projectId?: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
visualGraph: Record<string, unknown>;
|
||||
logicGraph: Record<string, unknown>;
|
||||
runtimeGraph: Record<string, unknown>;
|
||||
pluginRefs: string[];
|
||||
createdBy?: string;
|
||||
}) {
|
||||
return readJson<any>(
|
||||
await fetch(`${this.baseUrl}/api/workflow-templates`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify(input),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async createWorkflowFromTemplate(input: {
|
||||
templateId: string;
|
||||
workspaceId: string;
|
||||
projectId: string;
|
||||
name: string;
|
||||
createdBy?: string;
|
||||
}) {
|
||||
return readJson<any>(
|
||||
await fetch(`${this.baseUrl}/api/workflow-templates/${input.templateId}/workflows`, {
|
||||
method: "POST",
|
||||
headers: { "content-type": "application/json" },
|
||||
body: JSON.stringify({
|
||||
workspaceId: input.workspaceId,
|
||||
projectId: input.projectId,
|
||||
name: input.name,
|
||||
createdBy: input.createdBy,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
async createRun(input: {
|
||||
workflowDefinitionId: string;
|
||||
workflowVersionId: string;
|
||||
|
||||
@ -39,6 +39,38 @@ import {
|
||||
} from "./workflow-editor-state.ts";
|
||||
|
||||
const NODE_LIBRARY_MIME = "application/x-emboflow-node-definition";
|
||||
const ACTIVE_PROJECT_STORAGE_KEY_PREFIX = "emboflow.activeProject";
|
||||
|
||||
function navigateTo(pathname: string) {
|
||||
if (typeof window === "undefined") {
|
||||
return;
|
||||
}
|
||||
if (window.location.pathname === pathname) {
|
||||
return;
|
||||
}
|
||||
window.history.pushState({}, "", pathname);
|
||||
window.dispatchEvent(new PopStateEvent("popstate"));
|
||||
}
|
||||
|
||||
function getActiveProjectStorageKey(workspaceId: string) {
|
||||
return `${ACTIVE_PROJECT_STORAGE_KEY_PREFIX}:${workspaceId}`;
|
||||
}
|
||||
|
||||
function normalizePathnameForProjectSwitch(pathname: string) {
|
||||
if (pathname.startsWith("/assets/")) {
|
||||
return "/assets";
|
||||
}
|
||||
if (pathname.startsWith("/workflows/")) {
|
||||
return "/workflows";
|
||||
}
|
||||
if (pathname.startsWith("/runs/")) {
|
||||
return "/runs";
|
||||
}
|
||||
if (pathname.startsWith("/explore/")) {
|
||||
return "/explore";
|
||||
}
|
||||
return pathname === "/" ? "/projects" : pathname;
|
||||
}
|
||||
|
||||
function mapConnectionValidationReasonToKey(
|
||||
reason: WorkflowConnectionValidationReason | "missing_connection_endpoint",
|
||||
@ -63,7 +95,7 @@ function mapConnectionValidationReasonToKey(
|
||||
}
|
||||
}
|
||||
|
||||
type NavItem = "Assets" | "Workflows" | "Runs" | "Explore" | "Labels" | "Admin";
|
||||
type NavItem = "Projects" | "Assets" | "Workflows" | "Runs" | "Explore" | "Labels" | "Admin";
|
||||
|
||||
type BootstrapContext = {
|
||||
userId: string;
|
||||
@ -71,6 +103,14 @@ type BootstrapContext = {
|
||||
project: { _id: string; name: string };
|
||||
};
|
||||
|
||||
type ProjectSummary = {
|
||||
_id: string;
|
||||
name: string;
|
||||
description?: string;
|
||||
status?: string;
|
||||
createdAt?: string;
|
||||
};
|
||||
|
||||
type AppProps = {
|
||||
apiBaseUrl: string;
|
||||
};
|
||||
@ -132,7 +172,7 @@ function formatExecutorConfigLabel(config?: Record<string, unknown>) {
|
||||
|
||||
function usePathname() {
|
||||
const [pathname, setPathname] = useState(
|
||||
typeof window === "undefined" ? "/assets" : window.location.pathname || "/assets",
|
||||
typeof window === "undefined" ? "/projects" : window.location.pathname || "/projects",
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
@ -141,17 +181,19 @@ function usePathname() {
|
||||
return () => window.removeEventListener("popstate", handle);
|
||||
}, []);
|
||||
|
||||
return pathname === "/" ? "/assets" : pathname;
|
||||
return pathname === "/" ? "/projects" : pathname;
|
||||
}
|
||||
|
||||
function AppShell(props: {
|
||||
workspaceName: string;
|
||||
projectName: string;
|
||||
projectControl?: React.ReactNode;
|
||||
active: NavItem;
|
||||
children: React.ReactNode;
|
||||
}) {
|
||||
const { language, setLanguage, t } = useI18n();
|
||||
const navItems: Array<{ label: NavItem; href: string; key: "navAssets" | "navWorkflows" | "navRuns" | "navExplore" | "navLabels" | "navAdmin" }> = [
|
||||
const navItems: Array<{ label: NavItem; href: string; key: "navProjects" | "navAssets" | "navWorkflows" | "navRuns" | "navExplore" | "navLabels" | "navAdmin" }> = [
|
||||
{ label: "Projects", href: "/projects", key: "navProjects" },
|
||||
{ label: "Assets", href: "/assets", key: "navAssets" },
|
||||
{ label: "Workflows", href: "/workflows", key: "navWorkflows" },
|
||||
{ label: "Runs", href: "/runs", key: "navRuns" },
|
||||
@ -170,7 +212,7 @@ function AppShell(props: {
|
||||
</div>
|
||||
<div className="app-header__pill">
|
||||
<span className="app-header__label">{t("project")}</span>
|
||||
<strong>{props.projectName}</strong>
|
||||
{props.projectControl ?? <strong>{props.projectName}</strong>}
|
||||
</div>
|
||||
</div>
|
||||
<div className="app-header__group">
|
||||
@ -200,7 +242,14 @@ function AppShell(props: {
|
||||
<ul>
|
||||
{navItems.map((item) => (
|
||||
<li key={item.label}>
|
||||
<a href={item.href} data-active={String(item.label === props.active)}>
|
||||
<a
|
||||
href={item.href}
|
||||
data-active={String(item.label === props.active)}
|
||||
onClick={(event) => {
|
||||
event.preventDefault();
|
||||
navigateTo(item.href);
|
||||
}}
|
||||
>
|
||||
{t(item.key)}
|
||||
</a>
|
||||
</li>
|
||||
@ -212,25 +261,163 @@ function AppShell(props: {
|
||||
);
|
||||
}
|
||||
|
||||
function ProjectsPage(props: {
|
||||
api: ApiClient;
|
||||
bootstrap: BootstrapContext;
|
||||
projects: ProjectSummary[];
|
||||
activeProjectId: string;
|
||||
onProjectCreated: (project: ProjectSummary) => Promise<void> | void;
|
||||
onProjectSelected: (projectId: string, nextPath?: string) => void;
|
||||
}) {
|
||||
const { t } = useI18n();
|
||||
const [projectName, setProjectName] = useState("");
|
||||
const [projectDescription, setProjectDescription] = useState("");
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
return (
|
||||
<div className="page-stack">
|
||||
<section className="panel">
|
||||
<h1>{t("projectsTitle")}</h1>
|
||||
<p>{t("projectsDescription")}</p>
|
||||
<div className="field-grid">
|
||||
<label>
|
||||
{t("projectNameLabel")}
|
||||
<input
|
||||
value={projectName}
|
||||
onChange={(event) => setProjectName(event.target.value)}
|
||||
placeholder="Embodied Delivery Project"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("projectDescriptionLabel")}
|
||||
<textarea
|
||||
rows={3}
|
||||
value={projectDescription}
|
||||
onChange={(event) => setProjectDescription(event.target.value)}
|
||||
placeholder="Customer-specific dataset conversion and delivery workflows"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className="button-primary"
|
||||
disabled={projectName.trim().length === 0}
|
||||
onClick={async () => {
|
||||
setError(null);
|
||||
try {
|
||||
const project = await props.api.createProject({
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
name: projectName.trim(),
|
||||
description: projectDescription.trim() || undefined,
|
||||
createdBy: props.bootstrap.userId,
|
||||
});
|
||||
setProjectName("");
|
||||
setProjectDescription("");
|
||||
await props.onProjectCreated(project);
|
||||
} catch (createError) {
|
||||
setError(
|
||||
createError instanceof Error ? createError.message : t("failedCreateProject"),
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{t("createProject")}
|
||||
</button>
|
||||
</div>
|
||||
{error ? <p>{error}</p> : null}
|
||||
</section>
|
||||
|
||||
<section className="panel">
|
||||
<div className="list-grid">
|
||||
{props.projects.length === 0 ? (
|
||||
<p className="empty-state">{t("noProjectsYet")}</p>
|
||||
) : (
|
||||
props.projects.map((project) => {
|
||||
const isActive = project._id === props.activeProjectId;
|
||||
return (
|
||||
<article key={project._id} className="asset-card" data-active={String(isActive)}>
|
||||
<div className="toolbar">
|
||||
<strong>{project.name}</strong>
|
||||
{isActive ? (
|
||||
<span className="status-pill" data-status="running">
|
||||
{t("activeProject")}
|
||||
</span>
|
||||
) : null}
|
||||
</div>
|
||||
<p>{project.description || t("notAvailable")}</p>
|
||||
<p>{t("status")}: {translateStatus(project.status, t)}</p>
|
||||
<p>{t("createdAt")}: {project.createdAt ?? t("notAvailable")}</p>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className={isActive ? "button-secondary" : "button-primary"}
|
||||
onClick={() => props.onProjectSelected(project._id, "/workflows")}
|
||||
>
|
||||
{t("openProject")}
|
||||
</button>
|
||||
</div>
|
||||
</article>
|
||||
);
|
||||
})
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function AssetsPage(props: {
|
||||
api: ApiClient;
|
||||
bootstrap: BootstrapContext;
|
||||
}) {
|
||||
const { t } = useI18n();
|
||||
const [sourcePath, setSourcePath] = useState("");
|
||||
const [storageName, setStorageName] = useState("");
|
||||
const [storageProvider, setStorageProvider] = useState<"local" | "minio" | "s3" | "bos" | "oss">("local");
|
||||
const [storageBucket, setStorageBucket] = useState("");
|
||||
const [storageEndpoint, setStorageEndpoint] = useState("");
|
||||
const [storageRegion, setStorageRegion] = useState("");
|
||||
const [storageBasePath, setStorageBasePath] = useState("");
|
||||
const [storageRootPath, setStorageRootPath] = useState("");
|
||||
const [datasetName, setDatasetName] = useState("");
|
||||
const [datasetDescription, setDatasetDescription] = useState("");
|
||||
const [datasetStoragePath, setDatasetStoragePath] = useState("");
|
||||
const [selectedDatasetAssetId, setSelectedDatasetAssetId] = useState("");
|
||||
const [selectedStorageConnectionId, setSelectedStorageConnectionId] = useState("");
|
||||
const [assets, setAssets] = useState<any[]>([]);
|
||||
const [storageConnections, setStorageConnections] = useState<any[]>([]);
|
||||
const [datasets, setDatasets] = useState<any[]>([]);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const loadAssets = async () => {
|
||||
const loadData = async () => {
|
||||
try {
|
||||
setAssets(await props.api.listAssets(props.bootstrap.project._id));
|
||||
const [nextAssets, nextStorageConnections, nextDatasets] = await Promise.all([
|
||||
props.api.listAssets(props.bootstrap.project._id),
|
||||
props.api.listStorageConnections(props.bootstrap.workspace._id),
|
||||
props.api.listDatasets(props.bootstrap.project._id),
|
||||
]);
|
||||
setAssets(nextAssets);
|
||||
setStorageConnections(nextStorageConnections);
|
||||
setDatasets(nextDatasets);
|
||||
setSelectedDatasetAssetId((previous) => {
|
||||
if (previous && nextAssets.some((asset) => asset._id === previous)) {
|
||||
return previous;
|
||||
}
|
||||
return nextAssets[0]?._id ?? "";
|
||||
});
|
||||
setSelectedStorageConnectionId((previous) => {
|
||||
if (previous && nextStorageConnections.some((connection) => connection._id === previous)) {
|
||||
return previous;
|
||||
}
|
||||
return nextStorageConnections[0]?._id ?? "";
|
||||
});
|
||||
setError(null);
|
||||
} catch (loadError) {
|
||||
setError(loadError instanceof Error ? loadError.message : t("failedLoadAssets"));
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
void loadAssets();
|
||||
void loadData();
|
||||
}, [props.bootstrap.project._id]);
|
||||
|
||||
return (
|
||||
@ -260,7 +447,7 @@ function AssetsPage(props: {
|
||||
sourcePath,
|
||||
});
|
||||
await props.api.probeAsset(asset._id);
|
||||
await loadAssets();
|
||||
await loadData();
|
||||
} catch (registerError) {
|
||||
setError(
|
||||
registerError instanceof Error
|
||||
@ -276,6 +463,239 @@ function AssetsPage(props: {
|
||||
{error ? <p>{error}</p> : null}
|
||||
</section>
|
||||
|
||||
<section className="two-column">
|
||||
<section className="panel">
|
||||
<h2>{t("storageConnectionsTitle")}</h2>
|
||||
<p>{t("storageConnectionsDescription")}</p>
|
||||
<div className="field-grid">
|
||||
<label>
|
||||
{t("templateName")}
|
||||
<input
|
||||
value={storageName}
|
||||
onChange={(event) => setStorageName(event.target.value)}
|
||||
placeholder="Local Workspace Storage"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("storageProvider")}
|
||||
<select
|
||||
value={storageProvider}
|
||||
onChange={(event) =>
|
||||
setStorageProvider(event.target.value as "local" | "minio" | "s3" | "bos" | "oss")
|
||||
}
|
||||
>
|
||||
<option value="local">local</option>
|
||||
<option value="minio">minio</option>
|
||||
<option value="s3">s3</option>
|
||||
<option value="bos">bos</option>
|
||||
<option value="oss">oss</option>
|
||||
</select>
|
||||
</label>
|
||||
<label>
|
||||
{t("bucket")}
|
||||
<input
|
||||
value={storageBucket}
|
||||
onChange={(event) => setStorageBucket(event.target.value)}
|
||||
placeholder="emboflow-datasets"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("endpoint")}
|
||||
<input
|
||||
value={storageEndpoint}
|
||||
onChange={(event) => setStorageEndpoint(event.target.value)}
|
||||
placeholder="oss-cn-hangzhou.aliyuncs.com"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("region")}
|
||||
<input
|
||||
value={storageRegion}
|
||||
onChange={(event) => setStorageRegion(event.target.value)}
|
||||
placeholder="cn-hangzhou"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("basePath")}
|
||||
<input
|
||||
value={storageBasePath}
|
||||
onChange={(event) => setStorageBasePath(event.target.value)}
|
||||
placeholder="datasets/project-a"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("rootPath")}
|
||||
<input
|
||||
value={storageRootPath}
|
||||
onChange={(event) => setStorageRootPath(event.target.value)}
|
||||
placeholder="/Users/longtaowu/workspace/emboldata"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className="button-primary"
|
||||
onClick={async () => {
|
||||
setError(null);
|
||||
try {
|
||||
await props.api.createStorageConnection({
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
name: storageName || `${storageProvider} storage`,
|
||||
provider: storageProvider,
|
||||
bucket: storageBucket || undefined,
|
||||
endpoint: storageEndpoint || undefined,
|
||||
region: storageRegion || undefined,
|
||||
basePath: storageBasePath || undefined,
|
||||
rootPath: storageRootPath || undefined,
|
||||
});
|
||||
setStorageName("");
|
||||
setStorageBucket("");
|
||||
setStorageEndpoint("");
|
||||
setStorageRegion("");
|
||||
setStorageBasePath("");
|
||||
setStorageRootPath("");
|
||||
await loadData();
|
||||
} catch (createError) {
|
||||
setError(
|
||||
createError instanceof Error
|
||||
? createError.message
|
||||
: t("failedCreateStorageConnection"),
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{t("createStorageConnection")}
|
||||
</button>
|
||||
</div>
|
||||
<div className="list-grid" style={{ marginTop: 16 }}>
|
||||
{storageConnections.length === 0 ? (
|
||||
<p className="empty-state">{t("noStorageConnectionsYet")}</p>
|
||||
) : (
|
||||
storageConnections.map((connection) => (
|
||||
<article key={connection._id} className="asset-card">
|
||||
<div className="toolbar">
|
||||
<strong>{connection.name}</strong>
|
||||
<span className="status-pill">{connection.provider}</span>
|
||||
</div>
|
||||
<p>{t("bucket")}: {connection.bucket ?? t("notAvailable")}</p>
|
||||
<p>{t("endpoint")}: {connection.endpoint ?? connection.rootPath ?? t("notAvailable")}</p>
|
||||
<p>{t("basePath")}: {connection.basePath ?? t("notAvailable")}</p>
|
||||
</article>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section className="panel">
|
||||
<h2>{t("datasetsTitle")}</h2>
|
||||
<p>{t("datasetsDescription")}</p>
|
||||
<div className="field-grid">
|
||||
<label>
|
||||
{t("datasetName")}
|
||||
<input
|
||||
value={datasetName}
|
||||
onChange={(event) => setDatasetName(event.target.value)}
|
||||
placeholder="Delivery Dataset"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("datasetDescription")}
|
||||
<input
|
||||
value={datasetDescription}
|
||||
onChange={(event) => setDatasetDescription(event.target.value)}
|
||||
placeholder="Project level dataset derived from source assets"
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("sourceAsset")}
|
||||
<select
|
||||
value={selectedDatasetAssetId}
|
||||
onChange={(event) => setSelectedDatasetAssetId(event.target.value)}
|
||||
>
|
||||
{assets.length === 0 ? <option value="">{t("noAssetsAvailable")}</option> : null}
|
||||
{assets.map((asset) => (
|
||||
<option key={asset._id} value={asset._id}>
|
||||
{asset.displayName}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</label>
|
||||
<label>
|
||||
{t("storageConnection")}
|
||||
<select
|
||||
value={selectedStorageConnectionId}
|
||||
onChange={(event) => setSelectedStorageConnectionId(event.target.value)}
|
||||
>
|
||||
{storageConnections.length === 0 ? <option value="">{t("noStorageConnectionsYet")}</option> : null}
|
||||
{storageConnections.map((connection) => (
|
||||
<option key={connection._id} value={connection._id}>
|
||||
{connection.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</label>
|
||||
<label>
|
||||
{t("storagePathLabel")}
|
||||
<input
|
||||
value={datasetStoragePath}
|
||||
onChange={(event) => setDatasetStoragePath(event.target.value)}
|
||||
placeholder="delivery/dataset-v1"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className="button-primary"
|
||||
disabled={!selectedDatasetAssetId || !selectedStorageConnectionId}
|
||||
onClick={async () => {
|
||||
setError(null);
|
||||
try {
|
||||
await props.api.createDataset({
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
projectId: props.bootstrap.project._id,
|
||||
name:
|
||||
datasetName ||
|
||||
`Dataset from ${assets.find((asset) => asset._id === selectedDatasetAssetId)?.displayName ?? "asset"}`,
|
||||
description: datasetDescription || undefined,
|
||||
sourceAssetIds: [selectedDatasetAssetId],
|
||||
storageConnectionId: selectedStorageConnectionId,
|
||||
storagePath:
|
||||
datasetStoragePath ||
|
||||
`datasets/${datasetName ? datasetName.toLowerCase().replace(/\s+/gu, "-") : "dataset"}`,
|
||||
});
|
||||
setDatasetName("");
|
||||
setDatasetDescription("");
|
||||
setDatasetStoragePath("");
|
||||
await loadData();
|
||||
} catch (createError) {
|
||||
setError(
|
||||
createError instanceof Error ? createError.message : t("failedCreateDataset"),
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{t("createDataset")}
|
||||
</button>
|
||||
</div>
|
||||
<div className="list-grid" style={{ marginTop: 16 }}>
|
||||
{datasets.length === 0 ? (
|
||||
<p className="empty-state">{t("noDatasetsYet")}</p>
|
||||
) : (
|
||||
datasets.map((dataset) => (
|
||||
<article key={dataset._id} className="asset-card">
|
||||
<strong>{dataset.name}</strong>
|
||||
<p>{t("status")}: {translateStatus(dataset.status, t)}</p>
|
||||
<p>{t("sourceAssets")}: {(dataset.sourceAssetIds ?? []).join(", ") || t("none")}</p>
|
||||
<p>{t("storageConnection")}: {storageConnections.find((item) => item._id === dataset.storageConnectionId)?.name ?? dataset.storageConnectionId}</p>
|
||||
<p>{t("storagePathLabel")}: {dataset.storagePath}</p>
|
||||
<p>{t("latestDatasetVersion")}: {dataset.latestVersionNumber}</p>
|
||||
</article>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
|
||||
<section className="panel">
|
||||
<div className="list-grid">
|
||||
{assets.length === 0 ? (
|
||||
@ -407,19 +827,29 @@ function WorkflowsPage(props: {
|
||||
}) {
|
||||
const { t } = useI18n();
|
||||
const [workflows, setWorkflows] = useState<any[]>([]);
|
||||
const [templates, setTemplates] = useState<any[]>([]);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const load = async () => {
|
||||
try {
|
||||
setWorkflows(await props.api.listWorkflows(props.bootstrap.project._id));
|
||||
const [nextWorkflows, nextTemplates] = await Promise.all([
|
||||
props.api.listWorkflows(props.bootstrap.project._id),
|
||||
props.api.listWorkflowTemplates({
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
projectId: props.bootstrap.project._id,
|
||||
}),
|
||||
]);
|
||||
setWorkflows(nextWorkflows);
|
||||
setTemplates(nextTemplates);
|
||||
setError(null);
|
||||
} catch (loadError) {
|
||||
setError(loadError instanceof Error ? loadError.message : t("failedLoadWorkflows"));
|
||||
setError(loadError instanceof Error ? loadError.message : t("failedLoadTemplates"));
|
||||
}
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
void load();
|
||||
}, [props.bootstrap.project._id]);
|
||||
}, [props.bootstrap.project._id, props.bootstrap.workspace._id]);
|
||||
|
||||
return (
|
||||
<div className="page-stack">
|
||||
@ -429,36 +859,89 @@ function WorkflowsPage(props: {
|
||||
<button
|
||||
className="button-primary"
|
||||
onClick={async () => {
|
||||
await props.api.createWorkflow({
|
||||
const workflow = await props.api.createWorkflow({
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
projectId: props.bootstrap.project._id,
|
||||
name: t("workflowCreatedName", { count: workflows.length + 1 }),
|
||||
});
|
||||
navigateTo(`/workflows/${workflow._id}`);
|
||||
await load();
|
||||
}}
|
||||
>
|
||||
{t("createWorkflow")}
|
||||
{t("createBlankWorkflow")}
|
||||
</button>
|
||||
</div>
|
||||
{error ? <p>{error}</p> : null}
|
||||
</section>
|
||||
|
||||
<section className="panel">
|
||||
<div className="list-grid">
|
||||
{workflows.length === 0 ? (
|
||||
<p className="empty-state">{t("noWorkflowsYet")}</p>
|
||||
) : (
|
||||
workflows.map((workflow) => (
|
||||
<article key={workflow._id} className="asset-card">
|
||||
<a href={`/workflows/${workflow._id}`}>
|
||||
<strong>{workflow.name}</strong>
|
||||
</a>
|
||||
<p>{t("status")}: {translateStatus(workflow.status, t)}</p>
|
||||
<p>{t("latestVersion")}: {workflow.latestVersionNumber}</p>
|
||||
</article>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
<section className="two-column">
|
||||
<section className="panel">
|
||||
<h2>{t("workflowTemplatesTitle")}</h2>
|
||||
<p>{t("workflowTemplatesDescription")}</p>
|
||||
<div className="list-grid">
|
||||
{templates.length === 0 ? (
|
||||
<p className="empty-state">{t("noWorkflowTemplatesYet")}</p>
|
||||
) : (
|
||||
templates.map((template) => (
|
||||
<article key={template._id} className="asset-card">
|
||||
<div className="toolbar">
|
||||
<strong>{template.name}</strong>
|
||||
<span className="status-pill">{template.projectId ? t("project") : t("workspace")}</span>
|
||||
</div>
|
||||
<p>{template.description || t("notAvailable")}</p>
|
||||
<p>{t("status")}: {translateStatus(template.status, t)}</p>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className="button-primary"
|
||||
onClick={async () => {
|
||||
try {
|
||||
setError(null);
|
||||
const workflow = await props.api.createWorkflowFromTemplate({
|
||||
templateId: template._id,
|
||||
workspaceId: props.bootstrap.workspace._id,
|
||||
projectId: props.bootstrap.project._id,
|
||||
name: `${template.name} ${workflows.length + 1}`,
|
||||
createdBy: props.bootstrap.userId,
|
||||
});
|
||||
navigateTo(`/workflows/${workflow._id}`);
|
||||
} catch (createError) {
|
||||
setError(
|
||||
createError instanceof Error
|
||||
? createError.message
|
||||
: t("failedCreateWorkflowFromTemplate"),
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{t("createWorkflowFromTemplate")}
|
||||
</button>
|
||||
</div>
|
||||
</article>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section className="panel">
|
||||
<div className="toolbar">
|
||||
<h2 style={{ margin: 0 }}>{t("workflowsTitle")}</h2>
|
||||
</div>
|
||||
<div className="list-grid" style={{ marginTop: 12 }}>
|
||||
{workflows.length === 0 ? (
|
||||
<p className="empty-state">{t("noWorkflowsYet")}</p>
|
||||
) : (
|
||||
workflows.map((workflow) => (
|
||||
<article key={workflow._id} className="asset-card">
|
||||
<a href={`/workflows/${workflow._id}`}>
|
||||
<strong>{workflow.name}</strong>
|
||||
</a>
|
||||
<p>{t("status")}: {translateStatus(workflow.status, t)}</p>
|
||||
<p>{t("latestVersion")}: {workflow.latestVersionNumber}</p>
|
||||
</article>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
@ -479,6 +962,9 @@ function WorkflowEditorPage(props: {
|
||||
const [lastRunId, setLastRunId] = useState<string | null>(null);
|
||||
const [dirty, setDirty] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [templateName, setTemplateName] = useState("");
|
||||
const [templateDescription, setTemplateDescription] = useState("");
|
||||
const [savedTemplateName, setSavedTemplateName] = useState<string | null>(null);
|
||||
const [canvasFeedbackKey, setCanvasFeedbackKey] = useState<TranslationKey | null>(null);
|
||||
const [canvasDropActive, setCanvasDropActive] = useState(false);
|
||||
const [flowInstance, setFlowInstance] = useState<ReactFlowInstance<Node, Edge> | null>(null);
|
||||
@ -505,6 +991,9 @@ function WorkflowEditorPage(props: {
|
||||
const nextDraft = workflowDraftFromVersion(workflowVersions[0] ?? null);
|
||||
setDraft(nextDraft);
|
||||
setSelectedNodeId(nextDraft.logicGraph.nodes[0]?.id ?? "rename-folder");
|
||||
setTemplateName(`${workflowDefinition.name} Template`);
|
||||
setTemplateDescription("");
|
||||
setSavedTemplateName(null);
|
||||
setDirty(false);
|
||||
setCanvasFeedbackKey(null);
|
||||
} catch (loadError) {
|
||||
@ -970,6 +1459,68 @@ function WorkflowEditorPage(props: {
|
||||
) : (
|
||||
<p className="empty-state">{t("selectNode")}</p>
|
||||
)}
|
||||
<div className="template-save-section">
|
||||
<h3>{t("saveAsTemplate")}</h3>
|
||||
<div className="field-grid">
|
||||
<label>
|
||||
{t("templateName")}
|
||||
<input
|
||||
value={templateName}
|
||||
onChange={(event) => setTemplateName(event.target.value)}
|
||||
placeholder={`${workflow?.name ?? "Workflow"} Template`}
|
||||
/>
|
||||
</label>
|
||||
<label>
|
||||
{t("templateDescription")}
|
||||
<textarea
|
||||
rows={4}
|
||||
value={templateDescription}
|
||||
onChange={(event) => setTemplateDescription(event.target.value)}
|
||||
placeholder="Reusable project workflow for delivery normalization"
|
||||
/>
|
||||
</label>
|
||||
</div>
|
||||
<div className="button-row" style={{ marginTop: 12 }}>
|
||||
<button
|
||||
className="button-primary"
|
||||
disabled={!workflow || templateName.trim().length === 0}
|
||||
onClick={async () => {
|
||||
if (!workflow) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
setError(null);
|
||||
const templatePayload = serializeWorkflowDraft(draft);
|
||||
const template = await props.api.createWorkflowTemplate({
|
||||
workspaceId: workflow.workspaceId,
|
||||
projectId: workflow.projectId,
|
||||
name: templateName.trim(),
|
||||
description: templateDescription.trim() || undefined,
|
||||
visualGraph: templatePayload.visualGraph,
|
||||
logicGraph: templatePayload.logicGraph,
|
||||
runtimeGraph: templatePayload.runtimeGraph,
|
||||
pluginRefs: templatePayload.pluginRefs,
|
||||
createdBy: workflow.createdBy ?? "local-user",
|
||||
});
|
||||
setSavedTemplateName(template.name);
|
||||
} catch (createError) {
|
||||
setError(
|
||||
createError instanceof Error
|
||||
? createError.message
|
||||
: t("failedCreateTemplate"),
|
||||
);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{t("saveAsTemplate")}
|
||||
</button>
|
||||
</div>
|
||||
{savedTemplateName ? (
|
||||
<p className="empty-state">
|
||||
{t("templateSaved")}: {savedTemplateName}
|
||||
</p>
|
||||
) : null}
|
||||
</div>
|
||||
</aside>
|
||||
</section>
|
||||
</div>
|
||||
@ -1379,19 +1930,46 @@ export function App(props: AppProps) {
|
||||
const api = useMemo(() => new ApiClient(props.apiBaseUrl), [props.apiBaseUrl]);
|
||||
const pathname = usePathname();
|
||||
const [bootstrap, setBootstrap] = useState<BootstrapContext | null>(null);
|
||||
const [projects, setProjects] = useState<ProjectSummary[]>([]);
|
||||
const [activeProjectId, setActiveProjectId] = useState("");
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const syncProjects = useCallback(
|
||||
async (context: BootstrapContext, preferredProjectId?: string) => {
|
||||
const nextProjects = (await api.listProjects(context.workspace._id)) as ProjectSummary[];
|
||||
setProjects(nextProjects);
|
||||
|
||||
const storageKey = getActiveProjectStorageKey(context.workspace._id);
|
||||
const storedProjectId =
|
||||
typeof window === "undefined" ? null : window.localStorage.getItem(storageKey);
|
||||
const resolvedProject =
|
||||
nextProjects.find((project) => project._id === preferredProjectId) ??
|
||||
nextProjects.find((project) => project._id === storedProjectId) ??
|
||||
nextProjects.find((project) => project._id === context.project._id) ??
|
||||
nextProjects[0];
|
||||
|
||||
const resolvedProjectId = resolvedProject?._id ?? context.project._id;
|
||||
setActiveProjectId(resolvedProjectId);
|
||||
if (typeof window !== "undefined") {
|
||||
window.localStorage.setItem(storageKey, resolvedProjectId);
|
||||
}
|
||||
},
|
||||
[api],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
void (async () => {
|
||||
try {
|
||||
setBootstrap(await api.bootstrapDev());
|
||||
const context = await api.bootstrapDev();
|
||||
setBootstrap(context);
|
||||
await syncProjects(context, context.project._id);
|
||||
} catch (bootstrapError) {
|
||||
setError(
|
||||
bootstrapError instanceof Error ? bootstrapError.message : t("failedBootstrap"),
|
||||
);
|
||||
}
|
||||
})();
|
||||
}, [api, t]);
|
||||
}, [api, syncProjects, t]);
|
||||
|
||||
if (error) {
|
||||
return <section className="panel">{error}</section>;
|
||||
@ -1400,23 +1978,68 @@ export function App(props: AppProps) {
|
||||
return <section className="panel">{t("bootstrappingLocalWorkspace")}</section>;
|
||||
}
|
||||
|
||||
const activeProject =
|
||||
projects.find((project) => project._id === activeProjectId) ?? {
|
||||
_id: bootstrap.project._id,
|
||||
name: bootstrap.project.name,
|
||||
};
|
||||
const activeBootstrap: BootstrapContext = {
|
||||
...bootstrap,
|
||||
project: {
|
||||
_id: activeProject._id,
|
||||
name: activeProject.name,
|
||||
},
|
||||
};
|
||||
|
||||
const handleProjectSelected = (projectId: string, nextPath?: string) => {
|
||||
setActiveProjectId(projectId);
|
||||
if (typeof window !== "undefined") {
|
||||
window.localStorage.setItem(
|
||||
getActiveProjectStorageKey(activeBootstrap.workspace._id),
|
||||
projectId,
|
||||
);
|
||||
}
|
||||
if (nextPath) {
|
||||
navigateTo(nextPath);
|
||||
return;
|
||||
}
|
||||
navigateTo(normalizePathnameForProjectSwitch(pathname));
|
||||
};
|
||||
|
||||
const handleProjectCreated = async (project: ProjectSummary) => {
|
||||
if (!bootstrap) {
|
||||
return;
|
||||
}
|
||||
await syncProjects(bootstrap, project._id);
|
||||
navigateTo("/workflows");
|
||||
};
|
||||
|
||||
const assetMatch = pathname.match(/^\/assets\/([^/]+)$/);
|
||||
const workflowMatch = pathname.match(/^\/workflows\/([^/]+)$/);
|
||||
const runMatch = pathname.match(/^\/runs\/([^/]+)$/);
|
||||
const exploreMatch = pathname.match(/^\/explore\/([^/]+)$/);
|
||||
|
||||
let active: NavItem = "Assets";
|
||||
let content: React.ReactNode = <AssetsPage api={api} bootstrap={bootstrap} />;
|
||||
let active: NavItem = "Projects";
|
||||
let content: React.ReactNode = (
|
||||
<ProjectsPage
|
||||
api={api}
|
||||
bootstrap={activeBootstrap}
|
||||
projects={projects}
|
||||
activeProjectId={activeProject._id}
|
||||
onProjectCreated={handleProjectCreated}
|
||||
onProjectSelected={handleProjectSelected}
|
||||
/>
|
||||
);
|
||||
|
||||
if (pathname === "/workflows") {
|
||||
active = "Workflows";
|
||||
content = <WorkflowsPage api={api} bootstrap={bootstrap} />;
|
||||
content = <WorkflowsPage api={api} bootstrap={activeBootstrap} />;
|
||||
} else if (workflowMatch) {
|
||||
active = "Workflows";
|
||||
content = <WorkflowEditorPage api={api} workflowId={workflowMatch[1]} />;
|
||||
} else if (pathname === "/runs") {
|
||||
active = "Runs";
|
||||
content = <RunsIndexPage api={api} bootstrap={bootstrap} />;
|
||||
content = <RunsIndexPage api={api} bootstrap={activeBootstrap} />;
|
||||
} else if (runMatch) {
|
||||
active = "Runs";
|
||||
content = <RunDetailPage api={api} runId={runMatch[1]} />;
|
||||
@ -1429,12 +2052,28 @@ export function App(props: AppProps) {
|
||||
} else if (assetMatch) {
|
||||
active = "Assets";
|
||||
content = <AssetDetailPage api={api} assetId={assetMatch[1]} />;
|
||||
} else if (pathname === "/assets") {
|
||||
active = "Assets";
|
||||
content = <AssetsPage api={api} bootstrap={activeBootstrap} />;
|
||||
}
|
||||
|
||||
return (
|
||||
<AppShell
|
||||
workspaceName={bootstrap.workspace.name}
|
||||
projectName={bootstrap.project.name}
|
||||
workspaceName={activeBootstrap.workspace.name}
|
||||
projectName={activeBootstrap.project.name}
|
||||
projectControl={
|
||||
<select
|
||||
className="app-header__select"
|
||||
value={activeProject._id}
|
||||
onChange={(event) => handleProjectSelected(event.target.value)}
|
||||
>
|
||||
{(projects.length > 0 ? projects : [activeProject]).map((project) => (
|
||||
<option key={project._id} value={project._id}>
|
||||
{project.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
}
|
||||
active={active}
|
||||
>
|
||||
{content}
|
||||
|
||||
@ -4,6 +4,10 @@ import assert from "node:assert/strict";
|
||||
import { localizeNodeDefinition, translate } from "./i18n.tsx";
|
||||
|
||||
test("translate returns chinese and english labels for shared frontend keys", () => {
|
||||
assert.equal(translate("en", "navProjects"), "Projects");
|
||||
assert.equal(translate("zh", "navProjects"), "项目");
|
||||
assert.equal(translate("en", "templateSaved"), "Saved template");
|
||||
assert.equal(translate("zh", "templateSaved"), "已保存模板");
|
||||
assert.equal(translate("en", "navWorkflows"), "Workflows");
|
||||
assert.equal(translate("zh", "navWorkflows"), "工作流");
|
||||
assert.equal(
|
||||
|
||||
@ -7,6 +7,7 @@ export type TranslationKey =
|
||||
| "project"
|
||||
| "runs"
|
||||
| "localDev"
|
||||
| "navProjects"
|
||||
| "navAssets"
|
||||
| "navWorkflows"
|
||||
| "navRuns"
|
||||
@ -18,6 +19,35 @@ export type TranslationKey =
|
||||
| "chinese"
|
||||
| "assetsTitle"
|
||||
| "assetsDescription"
|
||||
| "projectsTitle"
|
||||
| "projectsDescription"
|
||||
| "projectNameLabel"
|
||||
| "projectDescriptionLabel"
|
||||
| "createProject"
|
||||
| "noProjectsYet"
|
||||
| "activeProject"
|
||||
| "openProject"
|
||||
| "storageConnectionsTitle"
|
||||
| "storageConnectionsDescription"
|
||||
| "createStorageConnection"
|
||||
| "storageProvider"
|
||||
| "bucket"
|
||||
| "endpoint"
|
||||
| "region"
|
||||
| "basePath"
|
||||
| "rootPath"
|
||||
| "noStorageConnectionsYet"
|
||||
| "datasetsTitle"
|
||||
| "datasetsDescription"
|
||||
| "datasetName"
|
||||
| "datasetDescription"
|
||||
| "sourceAsset"
|
||||
| "sourceAssets"
|
||||
| "storageConnection"
|
||||
| "storagePathLabel"
|
||||
| "createDataset"
|
||||
| "noDatasetsYet"
|
||||
| "latestDatasetVersion"
|
||||
| "localPath"
|
||||
| "registerLocalPath"
|
||||
| "noAssetsYet"
|
||||
@ -40,7 +70,16 @@ export type TranslationKey =
|
||||
| "recommendedNodes"
|
||||
| "noProbeReportYet"
|
||||
| "workflowsTitle"
|
||||
| "workflowTemplatesTitle"
|
||||
| "workflowTemplatesDescription"
|
||||
| "createWorkflow"
|
||||
| "createBlankWorkflow"
|
||||
| "createWorkflowFromTemplate"
|
||||
| "saveAsTemplate"
|
||||
| "templateName"
|
||||
| "templateDescription"
|
||||
| "templateSaved"
|
||||
| "noWorkflowTemplatesYet"
|
||||
| "noWorkflowsYet"
|
||||
| "latestVersion"
|
||||
| "workflowEditor"
|
||||
@ -110,8 +149,15 @@ export type TranslationKey =
|
||||
| "loadingArtifact"
|
||||
| "bootstrappingLocalWorkspace"
|
||||
| "failedLoadAssets"
|
||||
| "failedLoadStorageConnections"
|
||||
| "failedCreateStorageConnection"
|
||||
| "failedLoadDatasets"
|
||||
| "failedCreateDataset"
|
||||
| "failedRegisterAsset"
|
||||
| "failedLoadWorkflows"
|
||||
| "failedLoadTemplates"
|
||||
| "failedCreateTemplate"
|
||||
| "failedCreateWorkflowFromTemplate"
|
||||
| "failedLoadWorkflow"
|
||||
| "failedLoadRuns"
|
||||
| "failedLoadRunDetail"
|
||||
@ -121,6 +167,8 @@ export type TranslationKey =
|
||||
| "failedRetryTask"
|
||||
| "failedLoadArtifact"
|
||||
| "failedBootstrap"
|
||||
| "failedLoadProjects"
|
||||
| "failedCreateProject"
|
||||
| "validatedAssetCount"
|
||||
| "loadedAssetCount"
|
||||
| "success"
|
||||
@ -153,6 +201,7 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
project: "Project",
|
||||
runs: "Runs",
|
||||
localDev: "Local Dev",
|
||||
navProjects: "Projects",
|
||||
navAssets: "Assets",
|
||||
navWorkflows: "Workflows",
|
||||
navRuns: "Runs",
|
||||
@ -165,6 +214,38 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
assetsTitle: "Assets",
|
||||
assetsDescription:
|
||||
"Register local folders, archives, or dataset files, then probe them into managed asset metadata.",
|
||||
projectsTitle: "Projects",
|
||||
projectsDescription:
|
||||
"Create project spaces, switch the active project, and manage project-scoped assets, datasets, workflows, and runs.",
|
||||
projectNameLabel: "Project Name",
|
||||
projectDescriptionLabel: "Project Description",
|
||||
createProject: "Create Project",
|
||||
noProjectsYet: "No projects yet.",
|
||||
activeProject: "Active project",
|
||||
openProject: "Open Project",
|
||||
storageConnectionsTitle: "Storage Connections",
|
||||
storageConnectionsDescription:
|
||||
"Define where project datasets are stored, including local paths and object storage providers.",
|
||||
createStorageConnection: "Create Storage Connection",
|
||||
storageProvider: "Storage Provider",
|
||||
bucket: "Bucket",
|
||||
endpoint: "Endpoint",
|
||||
region: "Region",
|
||||
basePath: "Base Path",
|
||||
rootPath: "Root Path",
|
||||
noStorageConnectionsYet: "No storage connections yet.",
|
||||
datasetsTitle: "Datasets",
|
||||
datasetsDescription:
|
||||
"Create project datasets from source assets and bind them to a storage connection.",
|
||||
datasetName: "Dataset Name",
|
||||
datasetDescription: "Dataset Description",
|
||||
sourceAsset: "Source Asset",
|
||||
sourceAssets: "Source Assets",
|
||||
storageConnection: "Storage Connection",
|
||||
storagePathLabel: "Storage Path",
|
||||
createDataset: "Create Dataset",
|
||||
noDatasetsYet: "No datasets have been created yet.",
|
||||
latestDatasetVersion: "Latest dataset version",
|
||||
localPath: "Local Path",
|
||||
registerLocalPath: "Register Local Path",
|
||||
noAssetsYet: "No assets have been registered yet.",
|
||||
@ -187,7 +268,17 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
recommendedNodes: "Recommended nodes",
|
||||
noProbeReportYet: "No probe report yet.",
|
||||
workflowsTitle: "Workflows",
|
||||
workflowTemplatesTitle: "Workflow Templates",
|
||||
workflowTemplatesDescription:
|
||||
"Start workflows from reusable templates or create a blank workflow directly in the project.",
|
||||
createWorkflow: "Create Workflow",
|
||||
createBlankWorkflow: "Create Blank Workflow",
|
||||
createWorkflowFromTemplate: "Create From Template",
|
||||
saveAsTemplate: "Save As Template",
|
||||
templateName: "Template Name",
|
||||
templateDescription: "Template Description",
|
||||
templateSaved: "Saved template",
|
||||
noWorkflowTemplatesYet: "No workflow templates yet.",
|
||||
noWorkflowsYet: "No workflows yet.",
|
||||
latestVersion: "Latest version",
|
||||
workflowEditor: "Workflow Editor",
|
||||
@ -257,8 +348,15 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
loadingArtifact: "Loading artifact...",
|
||||
bootstrappingLocalWorkspace: "Bootstrapping local workspace...",
|
||||
failedLoadAssets: "Failed to load assets",
|
||||
failedLoadStorageConnections: "Failed to load storage connections",
|
||||
failedCreateStorageConnection: "Failed to create storage connection",
|
||||
failedLoadDatasets: "Failed to load datasets",
|
||||
failedCreateDataset: "Failed to create dataset",
|
||||
failedRegisterAsset: "Failed to register local asset",
|
||||
failedLoadWorkflows: "Failed to load workflows",
|
||||
failedLoadTemplates: "Failed to load workflow templates",
|
||||
failedCreateTemplate: "Failed to create workflow template",
|
||||
failedCreateWorkflowFromTemplate: "Failed to create workflow from template",
|
||||
failedLoadWorkflow: "Failed to load workflow",
|
||||
failedLoadRuns: "Failed to load runs",
|
||||
failedLoadRunDetail: "Failed to load run detail",
|
||||
@ -268,6 +366,8 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
failedRetryTask: "Failed to retry task",
|
||||
failedLoadArtifact: "Failed to load artifact",
|
||||
failedBootstrap: "Failed to bootstrap local context",
|
||||
failedLoadProjects: "Failed to load projects",
|
||||
failedCreateProject: "Failed to create project",
|
||||
validatedAssetCount: "validated {count} asset{suffix}",
|
||||
loadedAssetCount: "loaded {count} bound asset{suffix}",
|
||||
success: "success",
|
||||
@ -299,6 +399,7 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
project: "项目",
|
||||
runs: "运行",
|
||||
localDev: "本地开发",
|
||||
navProjects: "项目",
|
||||
navAssets: "数据资产",
|
||||
navWorkflows: "工作流",
|
||||
navRuns: "运行记录",
|
||||
@ -310,6 +411,35 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
chinese: "中文",
|
||||
assetsTitle: "数据资产",
|
||||
assetsDescription: "注册本地目录、压缩包或数据集文件,并将其探测为受管资产元数据。",
|
||||
projectsTitle: "项目",
|
||||
projectsDescription: "创建项目、切换当前项目,并管理项目级资产、数据集、工作流与运行记录。",
|
||||
projectNameLabel: "项目名称",
|
||||
projectDescriptionLabel: "项目描述",
|
||||
createProject: "创建项目",
|
||||
noProjectsYet: "还没有项目。",
|
||||
activeProject: "当前项目",
|
||||
openProject: "打开项目",
|
||||
storageConnectionsTitle: "存储连接",
|
||||
storageConnectionsDescription: "定义项目数据集的存储位置,包括本地路径和对象存储提供方。",
|
||||
createStorageConnection: "创建存储连接",
|
||||
storageProvider: "存储提供方",
|
||||
bucket: "Bucket",
|
||||
endpoint: "Endpoint",
|
||||
region: "Region",
|
||||
basePath: "基础路径",
|
||||
rootPath: "根路径",
|
||||
noStorageConnectionsYet: "还没有存储连接。",
|
||||
datasetsTitle: "数据集",
|
||||
datasetsDescription: "从源资产创建项目数据集,并绑定到一个存储连接。",
|
||||
datasetName: "数据集名称",
|
||||
datasetDescription: "数据集描述",
|
||||
sourceAsset: "源资产",
|
||||
sourceAssets: "源资产",
|
||||
storageConnection: "存储连接",
|
||||
storagePathLabel: "存储路径",
|
||||
createDataset: "创建数据集",
|
||||
noDatasetsYet: "还没有创建任何数据集。",
|
||||
latestDatasetVersion: "最新数据集版本",
|
||||
localPath: "本地路径",
|
||||
registerLocalPath: "注册本地路径",
|
||||
noAssetsYet: "还没有注册任何资产。",
|
||||
@ -332,7 +462,16 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
recommendedNodes: "推荐节点",
|
||||
noProbeReportYet: "还没有探测报告。",
|
||||
workflowsTitle: "工作流",
|
||||
workflowTemplatesTitle: "工作流模板",
|
||||
workflowTemplatesDescription: "从可复用模板创建工作流,或者直接在项目里创建空白工作流。",
|
||||
createWorkflow: "新建工作流",
|
||||
createBlankWorkflow: "创建空白工作流",
|
||||
createWorkflowFromTemplate: "从模板创建工作流",
|
||||
saveAsTemplate: "另存为模板",
|
||||
templateName: "模板名称",
|
||||
templateDescription: "模板描述",
|
||||
templateSaved: "已保存模板",
|
||||
noWorkflowTemplatesYet: "还没有工作流模板。",
|
||||
noWorkflowsYet: "还没有工作流。",
|
||||
latestVersion: "最新版本",
|
||||
workflowEditor: "工作流编辑器",
|
||||
@ -402,8 +541,15 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
loadingArtifact: "正在加载产物...",
|
||||
bootstrappingLocalWorkspace: "正在初始化本地工作空间...",
|
||||
failedLoadAssets: "加载资产失败",
|
||||
failedLoadStorageConnections: "加载存储连接失败",
|
||||
failedCreateStorageConnection: "创建存储连接失败",
|
||||
failedLoadDatasets: "加载数据集失败",
|
||||
failedCreateDataset: "创建数据集失败",
|
||||
failedRegisterAsset: "注册本地资产失败",
|
||||
failedLoadWorkflows: "加载工作流失败",
|
||||
failedLoadTemplates: "加载工作流模板失败",
|
||||
failedCreateTemplate: "创建工作流模板失败",
|
||||
failedCreateWorkflowFromTemplate: "从模板创建工作流失败",
|
||||
failedLoadWorkflow: "加载工作流失败",
|
||||
failedLoadRuns: "加载运行列表失败",
|
||||
failedLoadRunDetail: "加载运行详情失败",
|
||||
@ -413,6 +559,8 @@ const TRANSLATIONS: Record<Language, Record<TranslationKey, string>> = {
|
||||
failedRetryTask: "重试任务失败",
|
||||
failedLoadArtifact: "加载产物失败",
|
||||
failedBootstrap: "初始化本地上下文失败",
|
||||
failedLoadProjects: "加载项目失败",
|
||||
failedCreateProject: "创建项目失败",
|
||||
validatedAssetCount: "已校验 {count} 个资产",
|
||||
loadedAssetCount: "已加载 {count} 个绑定资产",
|
||||
success: "成功",
|
||||
|
||||
@ -22,6 +22,7 @@ a {
|
||||
|
||||
button,
|
||||
input,
|
||||
select,
|
||||
textarea {
|
||||
font: inherit;
|
||||
}
|
||||
@ -66,6 +67,14 @@ textarea {
|
||||
min-width: 140px;
|
||||
}
|
||||
|
||||
.app-header__select {
|
||||
min-width: 220px;
|
||||
border: 1px solid #cbd5e1;
|
||||
border-radius: 10px;
|
||||
padding: 10px 12px;
|
||||
background: #f8fafc;
|
||||
}
|
||||
|
||||
.app-header__label {
|
||||
font-size: 12px;
|
||||
color: #6b7280;
|
||||
@ -178,6 +187,7 @@ textarea {
|
||||
}
|
||||
|
||||
.field-grid input,
|
||||
.field-grid select,
|
||||
.field-grid textarea {
|
||||
border: 1px solid #cbd5e1;
|
||||
border-radius: 10px;
|
||||
@ -228,6 +238,11 @@ textarea {
|
||||
color: #6b7280;
|
||||
}
|
||||
|
||||
.asset-card[data-active="true"] {
|
||||
border-color: #111827;
|
||||
box-shadow: 0 0 0 1px rgba(17, 24, 39, 0.08);
|
||||
}
|
||||
|
||||
.workflow-canvas-panel {
|
||||
overflow: hidden;
|
||||
}
|
||||
@ -273,6 +288,16 @@ textarea {
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.template-save-section {
|
||||
margin-top: 24px;
|
||||
padding-top: 16px;
|
||||
border-top: 1px solid #e5e7eb;
|
||||
}
|
||||
|
||||
.template-save-section h3 {
|
||||
margin: 0 0 12px;
|
||||
}
|
||||
|
||||
.workflow-canvas-feedback {
|
||||
margin-left: auto;
|
||||
padding: 6px 10px;
|
||||
|
||||
@ -6,6 +6,13 @@ EmboFlow is a browser-based embodied data engineering platform for ingesting raw
|
||||
|
||||
The platform is designed around plugin-based extensibility, but the first version should deliver a stable built-in core before opening broader extension surfaces.
|
||||
|
||||
The current V1 implementation exposes that core through four first-class product objects:
|
||||
|
||||
- `Project`
|
||||
- `Asset`
|
||||
- `Dataset`
|
||||
- `WorkflowTemplate`
|
||||
|
||||
## Primary Users
|
||||
|
||||
- Individual engineers building embodied datasets
|
||||
@ -19,12 +26,14 @@ Build a usable end-to-end platform that allows users to:
|
||||
|
||||
1. Log into a personal or team workspace
|
||||
2. Create a project
|
||||
3. Upload or import raw embodied data assets
|
||||
4. Auto-detect asset structure and generate preview summaries
|
||||
5. Compose processing pipelines on a canvas
|
||||
6. Configure node parameters and inject code into processing nodes
|
||||
7. Execute workflows asynchronously and inspect logs and outputs
|
||||
8. Export normalized delivery packages, training datasets, or training config files
|
||||
3. Configure project storage connections for local paths or object storage
|
||||
4. Upload or import raw embodied data assets
|
||||
5. Derive reusable datasets from project assets
|
||||
6. Auto-detect asset structure and generate preview summaries
|
||||
7. Start a workflow from a reusable template or compose one from a blank canvas
|
||||
8. Configure node parameters and inject code into processing nodes
|
||||
9. Execute workflows asynchronously and inspect logs and outputs
|
||||
10. Export normalized delivery packages, training datasets, or training config files
|
||||
|
||||
## Supported Input Formats in V1
|
||||
|
||||
@ -47,6 +56,7 @@ Build a usable end-to-end platform that allows users to:
|
||||
|
||||
## Major Workspaces
|
||||
|
||||
- Project Workspace: create and switch project contexts
|
||||
- Asset Workspace: upload, import, scan, probe, browse
|
||||
- Canvas Workspace: build and run workflows
|
||||
- Explore Workspace: inspect raw assets and processed outputs
|
||||
|
||||
@ -12,6 +12,7 @@ Top-level product areas:
|
||||
|
||||
- Workspace switcher
|
||||
- Project selector
|
||||
- Projects
|
||||
- Asset Workspace
|
||||
- Canvas Workspace
|
||||
- Explore Workspace
|
||||
@ -35,6 +36,7 @@ Recommended global header content:
|
||||
|
||||
Recommended primary navigation:
|
||||
|
||||
- Projects
|
||||
- Assets
|
||||
- Workflows
|
||||
- Runs
|
||||
@ -59,6 +61,12 @@ Purpose:
|
||||
|
||||
V1 should emphasize project-level organization because all major resources are project-scoped.
|
||||
|
||||
The current implementation now matches this with:
|
||||
|
||||
- a dedicated `Projects` sidebar entry
|
||||
- a header-level active project selector
|
||||
- project cards that let the user open a project directly into workflow authoring
|
||||
|
||||
## Screen 2: Asset Workspace
|
||||
|
||||
Purpose:
|
||||
@ -85,6 +93,12 @@ Key actions:
|
||||
- open preview
|
||||
- create workflow from asset
|
||||
|
||||
The current V1 runtime extends this screen beyond raw assets and treats project data management as one combined workspace:
|
||||
|
||||
- raw asset registration
|
||||
- storage connection creation for `local`, `minio`, `s3`, `bos`, and `oss`
|
||||
- dataset creation from selected source assets into a selected storage connection
|
||||
|
||||
## Screen 3: Asset Detail / Explore Entry
|
||||
|
||||
Purpose:
|
||||
@ -172,6 +186,12 @@ The current V1 authoring rules intentionally keep the graph model constrained so
|
||||
|
||||
The runtime header also now exposes a visible `中文 / English` language toggle and the main shell plus workflow authoring surface are translated through a lightweight i18n layer.
|
||||
|
||||
The workflow entry surface that leads into this editor is also now template-aware:
|
||||
|
||||
- the Workflows page lists reusable workflow templates for the active project
|
||||
- the user can create a project workflow from a template
|
||||
- the user can still create a blank workflow directly
|
||||
|
||||
### Right Configuration Panel
|
||||
|
||||
The right panel is schema-driven.
|
||||
@ -189,6 +209,8 @@ It should render:
|
||||
|
||||
This panel is critical. It should feel like a structured system console, not a generic form dump.
|
||||
|
||||
The current right panel also includes a workflow-level `Save As Template` section so an edited graph can be published back into the project template library.
|
||||
|
||||
## Screen 5: Workflow Run Detail
|
||||
|
||||
Purpose:
|
||||
|
||||
@ -28,6 +28,7 @@
|
||||
- `2026-03-27`: The current built-in-node pass enriches the worker execution context with bound asset metadata and gives the default Python implementations for `source-asset` and `validate-structure` real delivery-oriented behavior instead of placeholder output.
|
||||
- `2026-03-27`: The current web-authoring pass adds a visible zh/en language switcher, a lightweight i18n layer for the runtime shell, and a real React Flow canvas with persisted node positions and viewport instead of the earlier static node list.
|
||||
- `2026-03-27`: The follow-up canvas pass adds left-panel drag-and-drop node placement, localized canvas feedback, and V1 connection guards for self-edges, duplicates, cycles, invalid source/export directions, and multiple inbound edges.
|
||||
- `2026-03-30`: The current product-integration pass promotes projects, datasets, storage connections, and workflow templates into first-class runtime flows. The shell now has a dedicated Projects page, project switching, workflow template gallery, workflow creation from templates, and workflow-level save-as-template support.
|
||||
|
||||
---
|
||||
|
||||
|
||||
123
docs/plans/2026-03-30-project-dataset-template-design.md
Normal file
123
docs/plans/2026-03-30-project-dataset-template-design.md
Normal file
@ -0,0 +1,123 @@
|
||||
# EmboFlow Project Dataset Template Design
|
||||
|
||||
## Goal
|
||||
|
||||
Define the next V1 product slice that turns the current runtime skeleton into a project-centric data workflow console with first-class datasets, storage connections, and workflow templates.
|
||||
|
||||
## Approved Boundary
|
||||
|
||||
- `Asset` remains the raw input object
|
||||
- `Dataset` becomes a project-scoped first-class object
|
||||
- `StorageConnection` becomes the place where datasets choose their persistence target
|
||||
- `WorkflowTemplate` becomes the reusable authoring entrypoint for workflows
|
||||
|
||||
## Current Implementation Baseline
|
||||
|
||||
The current codebase already has:
|
||||
|
||||
- Mongo-backed `storage_connections`, `datasets`, `dataset_versions`, and `workflow_templates`
|
||||
- HTTP endpoints for creating and listing those objects
|
||||
- an asset page that already exposes storage connection and dataset creation forms
|
||||
- a workflow editor with a large React Flow canvas, node drag and drop, edge creation, and Python code-hook editing
|
||||
- workflow creation from blank definitions
|
||||
|
||||
The missing layer is product integration:
|
||||
|
||||
- project switching and project creation in the main shell
|
||||
- a visible project workspace instead of a fixed bootstrap project
|
||||
- workflow template selection on the workflows landing page
|
||||
- template-based workflow creation as a first-class action
|
||||
- saving an edited workflow as a reusable template
|
||||
|
||||
## Product Model
|
||||
|
||||
### Workspace
|
||||
|
||||
The workspace owns:
|
||||
|
||||
- projects
|
||||
- storage connections
|
||||
- workspace-scoped workflow templates
|
||||
|
||||
### Project
|
||||
|
||||
The project owns:
|
||||
|
||||
- assets
|
||||
- datasets
|
||||
- workflow definitions
|
||||
- workflow runs
|
||||
- project-scoped workflow templates
|
||||
|
||||
### Asset vs Dataset
|
||||
|
||||
- `Asset` is the raw import or registered source
|
||||
- `Dataset` is the reusable project data product
|
||||
- A dataset references one or more source assets and one storage connection
|
||||
- Dataset versions remain immutable snapshots under the dataset
|
||||
|
||||
## UX Changes
|
||||
|
||||
### Header
|
||||
|
||||
The header should expose:
|
||||
|
||||
- workspace name
|
||||
- active project selector
|
||||
- quick create project action
|
||||
- language switcher
|
||||
|
||||
### Projects Page
|
||||
|
||||
Add a dedicated projects page to:
|
||||
|
||||
- list existing projects
|
||||
- create a new project
|
||||
- switch the active project
|
||||
- show lightweight counts for assets, datasets, workflows, and runs
|
||||
|
||||
### Assets Page
|
||||
|
||||
Keep the existing asset page as the project data hub:
|
||||
|
||||
- raw asset registration
|
||||
- storage connection management
|
||||
- dataset creation
|
||||
- project asset list
|
||||
|
||||
### Workflows Page
|
||||
|
||||
Split the current workflows landing page into two clear entry paths:
|
||||
|
||||
- start from template
|
||||
- start from blank workflow
|
||||
|
||||
Each template card should support:
|
||||
|
||||
- create workflow from template
|
||||
- open the template-backed workflow after creation
|
||||
|
||||
### Workflow Editor
|
||||
|
||||
Keep the large canvas and runtime configuration model, and add:
|
||||
|
||||
- save current workflow as template
|
||||
- explicit template name and description inputs for that action
|
||||
- no reduction in current node-level editing power
|
||||
|
||||
## Implementation Rules
|
||||
|
||||
- do not replace the current `Asset` run binding model in this slice
|
||||
- do not move storage connection management to a different backend model
|
||||
- do not introduce a new visual framework for the canvas
|
||||
- reuse current Mongo collections and runtime store methods where possible
|
||||
|
||||
## Success Criteria
|
||||
|
||||
The slice is done when:
|
||||
|
||||
1. users can create and switch projects without restarting bootstrap context
|
||||
2. datasets are visibly project-scoped and backed by a chosen storage connection
|
||||
3. workflows can be created either from a template or from a blank definition
|
||||
4. edited workflows can be saved back as reusable templates
|
||||
5. the canvas remains the primary authoring surface with runtime config and Python hook editing intact
|
||||
docs/plans/2026-03-30-project-dataset-template-implementation.md (new file, 239 lines)
@@ -0,0 +1,239 @@
|
||||
# EmboFlow Project Dataset Template Implementation Plan
|
||||
|
||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
||||
|
||||
**Goal:** Turn the current runtime shell into a project-centric product surface with project switching, project creation, workflow templates, and first-class dataset/storage management.
|
||||
|
||||
**Architecture:** Reuse the existing Mongo runtime store and HTTP API for storage connections, datasets, dataset versions, and workflow templates. Extend the web runtime shell so the active project becomes selectable, project resources reload against the selected project, and workflows can be created from templates or from a blank canvas. Keep `Asset` as raw input and `Dataset` as the project data product.
|
||||
|
||||
**Tech Stack:** React, TypeScript, React Flow, Express runtime API, MongoDB, `tsx --test`, Python unittest, and repo guardrails.
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Add Project Runtime API Client Support
|
||||
|
||||
**Files:**
|
||||
- Modify: `apps/web/src/runtime/api-client.ts`
|
||||
|
||||
**Step 1: Write the failing test**
|
||||
|
||||
Use the existing runtime integration coverage as the external contract; add a focused web runtime helper test only if a new pure helper is introduced.
|
||||
|
||||
**Step 2: Implement the minimal API additions**
|
||||
|
||||
Add:
|
||||
|
||||
- `listProjects(workspaceId)`
|
||||
- `createProject({ workspaceId, name, description, createdBy })`
|
||||
|
||||
Keep the current request/response style unchanged.
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add apps/web/src/runtime/api-client.ts
|
||||
git commit -m ":sparkles: add project runtime api client support"
|
||||
```
|
||||
|
||||
### Task 2: Make The Shell Project-Aware
|
||||
|
||||
**Files:**
|
||||
- Modify: `apps/web/src/runtime/app.tsx`
|
||||
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||
- Modify: `apps/web/src/styles.css`
|
||||
|
||||
**Step 1: Write the failing test**
|
||||
|
||||
Add a focused runtime state or pure helper test if needed for active project resolution.
|
||||
|
||||
**Step 2: Implement**
|
||||
|
||||
Add:
|
||||
|
||||
- active project state in `App`
|
||||
- project list loading after bootstrap
|
||||
- project selector in the header
|
||||
- quick create project action
|
||||
- route-safe project switching behavior
|
||||
- a `Projects` nav item and page entry
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx apps/web/src/styles.css
|
||||
git commit -m ":sparkles: add project-aware shell state"
|
||||
```
|
||||
|
||||
### Task 3: Add A Dedicated Projects Page
|
||||
|
||||
**Files:**
|
||||
- Modify: `apps/web/src/runtime/app.tsx`
|
||||
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||
|
||||
**Step 1: Write the failing test**
|
||||
|
||||
Add a web runtime test or browser validation script target if a helper is introduced.
|
||||
|
||||
**Step 2: Implement**
|
||||
|
||||
Create a projects workspace view that:
|
||||
|
||||
- lists projects for the current workspace
|
||||
- marks the active project
|
||||
- creates a project
|
||||
- lets the user switch into a project
|
||||
- shows lightweight resource counts derived from existing APIs
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||
git commit -m ":sparkles: add projects workspace"
|
||||
```
|
||||
|
||||
### Task 4: Turn Workflows Landing Into A Template Entry Surface
|
||||
|
||||
**Files:**
|
||||
- Modify: `apps/web/src/runtime/app.tsx`
|
||||
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||
|
||||
**Step 1: Write the failing test**
|
||||
|
||||
Add a focused browser validation script or pure helper test for template naming if needed.
|
||||
|
||||
**Step 2: Implement**
|
||||
|
||||
Update the workflows landing page to:
|
||||
|
||||
- load workflow templates for the active workspace/project
|
||||
- render template cards
|
||||
- create a workflow from a template
|
||||
- still support blank workflow creation
|
||||
- route into the created workflow editor
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||
git commit -m ":sparkles: add workflow template entry flow"
|
||||
```
|
||||
|
||||
### Task 5: Add Save-As-Template In The Workflow Editor
|
||||
|
||||
**Files:**
|
||||
- Modify: `apps/web/src/runtime/app.tsx`
|
||||
- Modify: `apps/web/src/runtime/i18n.tsx`
|
||||
|
||||
**Step 1: Write the failing test**
|
||||
|
||||
Add a minimal helper test if a template payload builder is introduced.
|
||||
|
||||
**Step 2: Implement**
|
||||
|
||||
Add editor controls to:
|
||||
|
||||
- enter template name and description
|
||||
- save the current draft/version payload as a workflow template
|
||||
- keep the large canvas and node runtime editing behavior intact
|
||||
|
||||
**Step 3: Verify**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add apps/web/src/runtime/app.tsx apps/web/src/runtime/i18n.tsx
|
||||
git commit -m ":sparkles: add save as template workflow action"
|
||||
```
|
||||
|
||||
### Task 6: Update Docs And Run Full Verification
|
||||
|
||||
**Files:**
|
||||
- Modify: `README.md`
|
||||
- Modify: `design/00-overview/emboflow-platform-overview.md`
|
||||
- Modify: `design/04-ui-ux/information-architecture-and-key-screens.md`
|
||||
- Modify: `docs/plans/2026-03-26-emboflow-v1-foundation-and-mvp.md`
|
||||
|
||||
**Step 1: Update docs**
|
||||
|
||||
Document:
|
||||
|
||||
- project selector and projects workspace
|
||||
- dataset/storage management as first-class project features
|
||||
- workflow template entry and save-as-template flow
|
||||
|
||||
**Step 2: Run verification**
|
||||
|
||||
Run:
|
||||
|
||||
```bash
|
||||
make test
|
||||
make guardrails
|
||||
pnpm --filter web build
|
||||
```
|
||||
|
||||
Expected: PASS
|
||||
|
||||
**Step 3: Browser validation**
|
||||
|
||||
Validate locally that:
|
||||
|
||||
- the active project can be changed
|
||||
- a new project can be created
|
||||
- a workflow can be created from a template
|
||||
- a blank workflow can still be created
|
||||
- an edited workflow can be saved as a template
|
||||
|
||||
**Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add README.md design/00-overview/emboflow-platform-overview.md design/04-ui-ux/information-architecture-and-key-screens.md docs/plans/2026-03-26-emboflow-v1-foundation-and-mvp.md docs/plans/2026-03-30-project-dataset-template-design.md docs/plans/2026-03-30-project-dataset-template-implementation.md
|
||||
git commit -m ":memo: document project dataset and template flow"
|
||||
```
|
||||
Loading…
x
Reference in New Issue
Block a user