feat(flow): add a streaming execution mode with SSE support

- Add a streaming execution mode that pushes node execution events and logs in real time over SSE
- Refactor the HTTP executor and middleware, extracting a shared HTTP client component
- Improve the frontend test panel: streaming-mode toggle and live log display
- Bump dependency versions and fix the random-number generator used for password hashing
- Fix the frontend node-type mapping so the Code node form works
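The dependency bump below includes rand 0.8 -> 0.9, which is the likely source of the password-hashing RNG fix mentioned above: rand 0.9 renames the thread-local generator and the `gen` method (a reserved keyword in edition 2024). The hashing code itself is not part of this diff; the sketch below only illustrates the assumed migration pattern, and `make_salt` is a hypothetical helper, not a function from this repository.

    use rand::Rng;

    // rand 0.8 (old): let mut rng = rand::thread_rng(); let salt: [u8; 16] = rng.gen();
    // rand 0.9 (new):
    fn make_salt() -> [u8; 16] {
        let mut rng = rand::rng(); // replaces rand::thread_rng()
        rng.random()               // replaces Rng::gen(), which collides with the 2024 `gen` keyword
    }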
backend/Cargo.lock (generated, 919 lines changed): diff suppressed because it is too large.
backend/Cargo.toml
@@ -1,17 +1,17 @@
 [package]
 name = "udmin"
 version = "0.1.0"
-edition = "2024" # upgraded to the latest Rust edition
+edition = "2024"
 default-run = "udmin"
 
 [dependencies]
 axum = "0.8.4"
 tokio = { version = "1.47.1", features = ["full"] }
-tower = "0.5.0"
+tower = "0.5.2"
 tower-http = { version = "0.6.6", features = ["cors", "trace"] }
 hyper = { version = "1" }
 bytes = "1"
-serde = { version = "1.0", features = ["derive"] }
+serde = { version = "1.0.225", features = ["derive"] }
 serde_json = "1.0"
 serde_with = "3.14.0"
 sea-orm = { version = "1.1.14", features = ["sqlx-mysql", "sqlx-sqlite", "sqlx-postgres", "runtime-tokio-rustls", "macros"] }
@@ -21,26 +21,28 @@ uuid = { version = "1.11.0", features = ["serde", "v4"] }
 chrono = { version = "0.4", features = ["serde"] }
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
-config = "0.14"
+config = "0.15.16"
-dotenvy = "0.15"
+dotenvy = "0.15.7"
-thiserror = "1.0"
+thiserror = "2.0.16"
 anyhow = "1.0"
 once_cell = "1.19.0"
 utoipa = { version = "5.4.0", features = ["axum_extras", "chrono", "uuid"] }
-utoipa-swagger-ui = { version = "6.0.0", features = ["axum"] }
+utoipa-swagger-ui = { version = "9.0.2", features = ["axum"] }
 sha2 = "0.10"
-rand = "0.8"
+rand = "0.9.2"
-async-trait = "0.1"
+async-trait = "0.1.89"
-redis = { version = "0.27", features = ["tokio-comp", "connection-manager"] }
+redis = { version = "0.32.5", features = ["tokio-comp", "connection-manager"] }
-petgraph = "0.6"
+petgraph = "0.8.2"
-rhai = { version = "1.17", features = ["serde", "metadata", "internals"] }
+rhai = { version = "1.23.4", features = ["serde", "metadata", "internals"] }
 serde_yaml = "0.9"
-regex = "1.10"
+regex = "1.11.2"
-reqwest = { version = "0.11", features = ["json", "rustls-tls-native-roots"], default-features = false }
+reqwest = { version = "0.12.23", features = ["json", "rustls-tls-native-roots"], default-features = false }
-futures = "0.3"
+futures = "0.3.31"
 percent-encoding = "2.3"
 # New: QuickJS runtime for the JS executor (no extra features enabled)
-rquickjs = "0.8"
+rquickjs = "0.9.0"
+# New: wraps an mpsc::Receiver as a Stream (for SSE)
+tokio-stream = "0.1.17"
 
 [dependencies.migration]
 path = "migration"
@@ -48,3 +50,6 @@ path = "migration"
 [profile.release]
 lto = true
 codegen-units = 1
+
+[dev-dependencies]
+wiremock = "0.6"
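tokio-stream = "0.1.17" is pulled in so the engine's mpsc event channel can be exposed as a Stream for SSE (see backend/src/middlewares/sse.rs later in this commit). A standalone sketch of that wrapping, separate from the project's actual wiring:

    use tokio_stream::{wrappers::ReceiverStream, StreamExt};

    #[tokio::main]
    async fn main() {
        let (tx, rx) = tokio::sync::mpsc::channel::<String>(16);
        tokio::spawn(async move {
            let _ = tx.send("hello".to_string()).await;
        });
        // ReceiverStream turns the channel into a Stream that an SSE response can map over.
        let mut stream = ReceiverStream::new(rx);
        while let Some(msg) = stream.next().await {
            println!("{msg}");
        }
    }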
backend/src/flow/context.rs
@@ -14,14 +14,29 @@ pub enum ExecutionMode {
 
 impl Default for ExecutionMode { fn default() -> Self { ExecutionMode::Sync } }
 
+// New: stream events (used for SSE)
+#[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(tag = "type")] // tagged with a discriminant field so the frontend can tell event types apart
+pub enum StreamEvent {
+    #[serde(rename = "node")]
+    Node { node_id: String, logs: Vec<String>, ctx: serde_json::Value },
+    #[serde(rename = "done")]
+    Done { ok: bool, ctx: serde_json::Value, logs: Vec<String> },
+    #[serde(rename = "error")]
+    Error { message: String },
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct DriveOptions {
     #[serde(default)]
     pub max_steps: usize,
     #[serde(default)]
     pub execution_mode: ExecutionMode,
+    // New: event channel (runtime only, never serialized or deserialized)
+    #[serde(default, skip_serializing, skip_deserializing)]
+    pub event_tx: Option<tokio::sync::mpsc::Sender<StreamEvent>>,
 }
 
 impl Default for DriveOptions {
-    fn default() -> Self { Self { max_steps: 10_000, execution_mode: ExecutionMode::Sync } }
+    fn default() -> Self { Self { max_steps: 10_000, execution_mode: ExecutionMode::Sync, event_tx: None } }
 }
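With #[serde(tag = "type")] and the per-variant renames, the three events serialize to flat JSON objects discriminated by a "type" field. A minimal sketch of the wire shapes the frontend receives (all field values below are made up for illustration):

    use serde_json::json;

    fn main() {
        // Shape of a per-node event as it appears on the SSE stream:
        let node_evt = json!({
            "type": "node",
            "node_id": "node_1",
            "logs": ["enter node: node_1"],
            "ctx": { "nodes": {} }
        });
        // Terminal events:
        let done_evt = json!({ "type": "done", "ok": true, "ctx": {}, "logs": [] });
        let error_evt = json!({ "type": "error", "message": "task error: http: timeout" });
        println!("{node_evt}\n{done_evt}\n{error_evt}");
    }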
backend/src/flow/dsl.rs
@@ -1,3 +1,12 @@
+//! Module: parsing, validation and construction for the flow DSL and the free-layout Design JSON.
+//! Contents:
+//! - FlowDSL/NodeDSL/EdgeDSL: a deliberately "descriptive", simplified DSL (used by external APIs / persistence).
+//! - DesignSyntax/NodeSyntax/EdgeSyntax: structures aligned with the frontend free-layout JSON (including source_port_id, etc.).
+//! - validate_design: basic validation (unique node IDs, at least one start and one end, edges referencing known nodes).
+//! - build_chain_from_design: converts the free-layout JSON into the internal ChainDef (with heuristics and compatibility logic such as AND grouping for condition nodes).
+//! - chain_from_design_json: the unified entry point; accepts either a string or an object, back-fills compatibility fields, then validates and builds.
+//! Note: aims to stay backward compatible; condition-node outgoing edges are assembled heuristically (e.g. a single outgoing edge plus multiple conditions becomes an AND condition group).
+
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use anyhow::bail;
@@ -5,36 +14,53 @@ use anyhow::bail;
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct FlowDSL {
     #[serde(default)]
+    /// Flow name (optional)
     pub name: String,
     #[serde(default, alias = "executionMode")]
+    /// Execution mode (accepts the frontend's executionMode), e.g. sync/async (currently a placeholder)
     pub execution_mode: Option<String>,
+    /// Node list (in declaration order)
     pub nodes: Vec<NodeDSL>,
     #[serde(default)]
+    /// Edge list (from -> to, with an optional condition)
     pub edges: Vec<EdgeDSL>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct NodeDSL {
+    /// Unique node ID (string)
     pub id: String,
     #[serde(default)]
+    /// Node kind: start / end / task / condition
     pub kind: String, // node kind: start / end / task / condition
     #[serde(default)]
+    /// Display name of the node (optional)
     pub name: String,
     #[serde(default)]
+    /// Task identifier (binds an executor), e.g. http/db/variable/script_* (optional)
     pub task: Option<String>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct EdgeDSL {
+    /// Source node ID (aliases: source/from)
     #[serde(alias = "source", alias = "from", rename = "from")]
     pub from: String,
+    /// Target node ID (aliases: target/to)
     #[serde(alias = "target", alias = "to", rename = "to")]
     pub to: String,
     #[serde(default)]
+    /// Condition expression (string):
+    /// - if it is a JSON string (starting with { or [), it is evaluated as a JSON condition set;
+    /// - otherwise it is evaluated as a Rhai expression;
+    /// - an empty string/None means unconditional.
     pub condition: Option<String>,
 }
 
 impl From<FlowDSL> for super::domain::ChainDef {
+    /// Convert the simplified DSL into the internal ChainDef:
+    /// - kind mapping: start/end/condition, everything else -> task; the decision alias maps to condition.
+    /// - the edges' from/to/condition are carried over as-is.
     fn from(v: FlowDSL) -> Self {
         super::domain::ChainDef {
             name: v.name,
@@ -71,34 +97,47 @@ impl From<FlowDSL> for super::domain::ChainDef {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct DesignSyntax {
     #[serde(default)]
+    /// Design name (optional)
     pub name: String,
     #[serde(default)]
+    /// Node set (free layout)
     pub nodes: Vec<NodeSyntax>,
     #[serde(default)]
+    /// Edge set (free layout)
     pub edges: Vec<EdgeSyntax>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct NodeSyntax {
+    /// Node ID
     pub id: String,
     #[serde(rename = "type", default)]
+    /// Frontend type: start | end | condition | http | db | task | script_* (used to infer the concrete executor)
     pub kind: String, // values: start | end | condition | http | db | task
     #[serde(default)]
+    /// Extra node data: title/conditions/scripts, etc.
     pub data: serde_json::Value,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct EdgeSyntax {
+    /// Source (accepts sourceNodeID/source/from)
     #[serde(alias = "sourceNodeID", alias = "source", alias = "from")]
     pub from: String,
+    /// Target (accepts targetNodeID/target/to)
    #[serde(alias = "targetNodeID", alias = "target", alias = "to")]
     pub to: String,
     #[serde(default)]
+    /// Source port ID: legacy mapping from a condition node's port to a condition key;
+    /// the special values and/all/group/true assemble the values of all of the node's conditions into an AND group.
     pub source_port_id: Option<String>,
 }
 
 
-/// Build a ChainDef from design_json (the frontend flow JSON)
+/// Design-level validation:
+/// - node IDs are unique and non-empty;
+/// - there is at least one start and one end;
+/// - every edge's from/to must point at a known node.
 fn validate_design(design: &DesignSyntax) -> anyhow::Result<()> {
     use std::collections::HashSet;
     let mut ids = HashSet::new();
@@ -129,6 +168,13 @@ fn validate_design(design: &DesignSyntax) -> anyhow::Result<()> {
     Ok(())
 }
 
+/// Convert the free-layout DesignSyntax into the internal ChainDef:
+/// - nodes: infer kind/name/task (including scripts and inline script/expr compatibility);
+/// - edges:
+///   * condition nodes: support the legacy mapping from source_port_id to data.conditions;
+///   * when source_port_id is empty or one of and/all/group/true, the values of the node's conditions are combined into an AND group;
+///   * heuristic: if a condition node has only one outgoing edge but multiple conditions, fall back to an AND group even when source_port_id points at a specific key;
+///   * non-condition nodes: conditions are not processed.
 fn build_chain_from_design(design: &DesignSyntax) -> anyhow::Result<super::domain::ChainDef> {
     use super::domain::{ChainDef, NodeDef, NodeId, NodeKind, LinkDef};
 
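The serde aliases above are what let the backend accept edges exported by different frontend versions without any mapping code. A trimmed-down sketch (the struct is copied here only so the snippet compiles on its own; it is not the full EdgeSyntax):

    use serde::Deserialize;
    use serde_json::json;

    // Minimal copy of EdgeSyntax, just enough to show the alias behaviour.
    #[derive(Debug, Deserialize)]
    struct EdgeSyntax {
        #[serde(alias = "sourceNodeID", alias = "source", alias = "from")]
        from: String,
        #[serde(alias = "targetNodeID", alias = "target", alias = "to")]
        to: String,
        #[serde(default)]
        source_port_id: Option<String>,
    }

    fn main() {
        // Older exports use sourceNodeID/targetNodeID, newer ones use source/target; both parse the same.
        let legacy = json!({ "sourceNodeID": "start_0", "targetNodeID": "cond_1", "source_port_id": "and" });
        let modern = json!({ "source": "start_0", "target": "cond_1" });
        let a: EdgeSyntax = serde_json::from_value(legacy).unwrap();
        let b: EdgeSyntax = serde_json::from_value(modern).unwrap();
        assert_eq!(a.from, b.from);
        assert_eq!(a.to, b.to);
    }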
backend/src/flow/engine.rs
@@ -11,7 +11,17 @@ use std::cell::RefCell;
 use rhai::AST;
 use regex::Regex;
 
-// expose commonly used regex matching to expressions
+// Module: flow execution engine (engine.rs)
+// Purpose: drives a ChainDef flow graph, supporting:
+// - synchronous and asynchronous (fire-and-forget) task execution
+// - conditional routing (Rhai expressions and JSON conditions) with an unconditional fallback
+// - concurrent branch fan-out with join_all waiting
+// - real-time SSE event push (per-line increments plus per-node slices)
+// Design notes:
+// - expression evaluation uses a thread_local Rhai Engine and AST cache to avoid global Send/Sync constraints
+// - the shared context is a serde_json::Value behind an RwLock; log aggregation uses a Mutex<Vec<String>>
+// - no conflict detection: concurrent modification is allowed; the last write along a code path wins
+//
 fn regex_match(s: &str, pat: &str) -> bool {
     Regex::new(pat).map(|re| re.is_match(s)).unwrap_or(false)
 }
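The design note about a thread_local Rhai engine matters because rhai::Engine is not Sync, so it cannot sit in a shared static that tokio worker threads use concurrently. A rough sketch of that pattern, assuming a per-thread engine plus compiled-AST cache (names here are illustrative, not the engine's actual internals):

    use std::cell::RefCell;
    use std::collections::HashMap;
    use rhai::{Engine, AST};

    thread_local! {
        // One engine and one compiled-expression cache per worker thread; no Send/Sync bounds needed.
        static RHAI: RefCell<(Engine, HashMap<String, AST>)> =
            RefCell::new((Engine::new(), HashMap::new()));
    }

    fn eval_bool(expr: &str) -> bool {
        RHAI.with(|cell| {
            let (engine, cache) = &mut *cell.borrow_mut();
            // Compile once per thread, then re-evaluate the cached AST.
            let ast = cache
                .entry(expr.to_string())
                .or_insert_with(|| engine.compile_expression(expr).unwrap_or_else(|_| AST::empty()));
            engine.eval_ast::<bool>(ast).unwrap_or(false)
        })
    }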
@@ -135,7 +145,7 @@ impl FlowEngine {
     pub fn builder() -> FlowEngineBuilder { FlowEngineBuilder::default() }
 
     pub async fn drive(&self, chain: &ChainDef, ctx: serde_json::Value, opts: DriveOptions) -> anyhow::Result<(serde_json::Value, Vec<String>)> {
-        // 1) pick the start node
+        // 1) pick the start node: prefer Start; otherwise in-degree 0; otherwise the first node
         // find start: prefer a Start node; otherwise the first node with in-degree 0; otherwise fall back to the first node
         let start = if let Some(n) = chain
             .nodes
@@ -208,10 +218,30 @@ async fn drive_from(
         // entering the node: mark it
         let node_id_str = node.id.0.clone();
         let node_start = Instant::now();
+        // remember the current log length before entering the node, so a per-node slice can be taken when it finishes
+        let pre_len = { logs.lock().await.len() };
+        // every appended log line also triggers an incremental SSE event (a single line) to improve latency
+        // push_and_emit:
+        // - first push the single log line into the shared log
+        // - if an SSE channel is present, take a context snapshot and send a one-line incremental event
+        async fn push_and_emit(
+            logs: &std::sync::Arc<tokio::sync::Mutex<Vec<String>>>,
+            opts: &super::context::DriveOptions,
+            node_id: &str,
+            ctx: &std::sync::Arc<tokio::sync::RwLock<serde_json::Value>>,
+            msg: String,
+        ) {
             {
                 let mut lg = logs.lock().await;
-                lg.push(format!("enter node: {}", node.id.0));
+                lg.push(msg.clone());
             }
+            if let Some(tx) = opts.event_tx.as_ref() {
+                let ctx_snapshot = { ctx.read().await.clone() };
+                crate::middlewares::sse::emit_node(&tx, node_id.to_string(), vec![msg], ctx_snapshot).await;
+            }
+        }
+        // the node-enter line is pushed in real time as well
+        push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("enter node: {}", node.id.0)).await;
         info!(target: "udmin.flow", "enter node: {}", node.id.0);
 
         // execute the task
@@ -221,14 +251,22 @@ async fn drive_from(
                 ExecutionMode::Sync => {
                     // execute on a snapshot and write it back as a whole afterwards (the last write may overwrite concurrent changes; no conflict detection)
                     let mut local_ctx = { ctx.read().await.clone() };
-                    task.execute(&node.id, node, &mut local_ctx).await?;
-                    { let mut w = ctx.write().await; *w = local_ctx; }
-                    {
-                        let mut lg = logs.lock().await;
-                        lg.push(format!("exec task: {} (sync)", task_name));
-                    }
-                    info!(target: "udmin.flow", "exec task: {} (sync)", task_name);
+                    match task.execute(&node.id, node, &mut local_ctx).await {
+                        Ok(_) => {
+                            { let mut w = ctx.write().await; *w = local_ctx; }
+                            push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("exec task: {} (sync)", task_name)).await;
+                            info!(target: "udmin.flow", "exec task: {} (sync)", task_name);
+                        }
+                        Err(e) => {
+                            let err_msg = format!("task error: {}: {}", task_name, e);
+                            push_and_emit(&logs, &opts, &node_id_str, &ctx, err_msg.clone()).await;
+                            // capture snapshots and return a DriveError
+                            let ctx_snapshot = { ctx.read().await.clone() };
+                            let logs_snapshot = { logs.lock().await.clone() };
+                            return Err(anyhow::Error::new(DriveError { node_id: node_id_str.clone(), ctx: ctx_snapshot, logs: logs_snapshot, message: err_msg }));
+                        }
+                    }
                 }
                 ExecutionMode::AsyncFireAndForget => {
                     // fire-and-forget: execute on a snapshot, without writing back to the shared ctx (except variable tasks: bounded diff write-back)
                     let task_ctx = { ctx.read().await.clone() };
@@ -238,6 +276,7 @@ async fn drive_from(
                     let node_def = node.clone();
                     let logs_clone = logs.clone();
                     let ctx_clone = ctx.clone();
+                    let event_tx_opt = opts.event_tx.clone();
                     tokio::spawn(async move {
                         let mut c = task_ctx.clone();
                         let _ = task_arc.execute(&node_id, &node_def, &mut c).await;
@@ -268,25 +307,31 @@ async fn drive_from(
                             let mut lg = logs_clone.lock().await;
                             lg.push(format!("exec task done (async): {} (writeback variable)", name_for_log));
                         }
+                        // push the async completion log in real time
+                        if let Some(tx) = event_tx_opt.as_ref() {
+                            let ctx_snapshot = { ctx_clone.read().await.clone() };
+                            crate::middlewares::sse::emit_node(&tx, node_id.0.clone(), vec![format!("exec task done (async): {} (writeback variable)", name_for_log)], ctx_snapshot).await;
+                        }
                         info!(target: "udmin.flow", "exec task done (async): {} (writeback variable)", name_for_log);
                     } else {
                         {
                             let mut lg = logs_clone.lock().await;
                             lg.push(format!("exec task done (async): {}", name_for_log));
                         }
+                        // push the async completion log in real time
+                        if let Some(tx) = event_tx_opt.as_ref() {
+                            let ctx_snapshot = { ctx_clone.read().await.clone() };
+                            crate::middlewares::sse::emit_node(&tx, node_id.0.clone(), vec![format!("exec task done (async): {}", name_for_log)], ctx_snapshot).await;
+                        }
                         info!(target: "udmin.flow", "exec task done (async): {}", name_for_log);
                     }
                 });
-                {
-                    let mut lg = logs.lock().await;
-                    lg.push(format!("spawn task: {} (async)", task_name));
-                }
+                push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("spawn task: {} (async)", task_name)).await;
                 info!(target: "udmin.flow", "spawn task: {} (async)", task_name);
                 }
             }
         } else {
-            let mut lg = logs.lock().await;
-            lg.push(format!("task not found: {} (skip)", task_name));
+            push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("task not found: {} (skip)", task_name)).await;
             info!(target: "udmin.flow", "task not found: {} (skip)", task_name);
         }
     }
@@ -294,11 +339,13 @@ async fn drive_from(
         // End node: record the elapsed time and stop
         if matches!(node.kind, NodeKind::End) {
             let duration = node_start.elapsed().as_millis();
-            {
-                let mut lg = logs.lock().await;
-                lg.push(format!("leave node: {} {} ms", node_id_str, duration));
-            }
+            push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("leave node: {} {} ms", node_id_str, duration)).await;
             info!(target: "udmin.flow", "leave node: {} {} ms", node_id_str, duration);
+            if let Some(tx) = opts.event_tx.as_ref() {
+                let node_logs = { let lg = logs.lock().await; lg[pre_len..].to_vec() };
+                let ctx_snapshot = { ctx.read().await.clone() };
+                crate::middlewares::sse::emit_node(&tx, node_id_str.clone(), node_logs, ctx_snapshot).await;
+            }
             break;
         }
 
@@ -367,7 +414,13 @@ async fn drive_from(
                 let mut lg = logs.lock().await;
                 lg.push(format!("leave node: {} {} ms", node_id_str, duration));
             }
+            push_and_emit(&logs, &opts, &node_id_str, &ctx, format!("leave node: {} {} ms", node_id_str, duration)).await;
             info!(target: "udmin.flow", "leave node: {} {} ms", node_id_str, duration);
+            if let Some(tx) = opts.event_tx.as_ref() {
+                let node_logs = { let lg = logs.lock().await; lg[pre_len..].to_vec() };
+                let ctx_snapshot = { ctx.read().await.clone() };
+                crate::middlewares::sse::emit_node(&tx, node_id_str.clone(), node_logs, ctx_snapshot).await;
+            }
             break;
         }
 
@@ -379,6 +432,11 @@ async fn drive_from(
                 lg.push(format!("leave node: {} {} ms", node_id_str, duration));
             }
             info!(target: "udmin.flow", "leave node: {} {} ms", node_id_str, duration);
+            if let Some(tx) = opts.event_tx.as_ref() {
+                let node_logs = { let lg = logs.lock().await; lg[pre_len..].to_vec() };
+                let ctx_snapshot = { ctx.read().await.clone() };
+                crate::middlewares::sse::emit_node(&tx, node_id_str.clone(), node_logs, ctx_snapshot).await;
+            }
             current = nexts.remove(0);
             continue;
         }
@@ -405,6 +463,11 @@ async fn drive_from(
                 lg.push(format!("leave node: {} {} ms", node_id_str, duration));
             }
             info!(target: "udmin.flow", "leave node: {} {} ms", node_id_str, duration);
+            if let Some(tx) = opts.event_tx.as_ref() {
+                let node_logs = { let lg = logs.lock().await; lg[pre_len..].to_vec() };
+                let ctx_snapshot = { ctx.read().await.clone() };
+                crate::middlewares::sse::emit_node(&tx, node_id_str.clone(), node_logs, ctx_snapshot).await;
+            }
         }
 
     Ok(())
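push_and_emit only fires when DriveOptions.event_tx is set, so streaming is opt-in per run. A rough sketch of how a caller could wire the channel before driving a chain; the consumer here just prints, whereas the real handler hands the receiver to the SSE response (this is illustrative wiring, not the project's actual handler code):

    use crate::flow::context::{DriveOptions, StreamEvent};
    use crate::flow::domain::ChainDef;
    use crate::flow::engine::FlowEngine;

    // Sketch: run a chain in streaming mode; `engine` and `chain` come from the surrounding code.
    async fn drive_streaming(engine: &FlowEngine, chain: &ChainDef, input: serde_json::Value) {
        let (tx, mut rx) = tokio::sync::mpsc::channel::<StreamEvent>(64);
        // Consumer side: in the real handler this receiver is wrapped into an SSE response instead.
        let printer = tokio::spawn(async move {
            while let Some(evt) = rx.recv().await {
                println!("{}", serde_json::to_string(&evt).unwrap_or_default());
            }
        });
        let opts = DriveOptions { event_tx: Some(tx), ..Default::default() };
        let _ = engine.drive(chain, input, opts).await;
        // When the run (and any spawned tasks) drops the sender, the consumer loop ends.
        let _ = printer.await;
    }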
@@ -427,138 +490,19 @@ impl Default for FlowEngine {
     fn default() -> Self { Self { tasks: crate::flow::task::default_registry() } }
 }
 
-/* moved to executors::condition
-fn eval_condition_json(ctx: &serde_json::Value, cond: &serde_json::Value) -> anyhow::Result<bool> {
-    // supports the shape exported by the frontend Condition component: { left:{type, content}, operator, right? }
-    use serde_json::Value as V;
-
-    let left = cond.get("left").ok_or_else(|| anyhow::anyhow!("missing left"))?;
-    let op_raw = cond.get("operator").and_then(|v| v.as_str()).unwrap_or("");
-    let right = cond.get("right");
-
-    let lval = resolve_value(ctx, left)?;
-    let rval = match right { Some(v) => Some(resolve_value(ctx, v)?), None => None };
-
-    // normalize the operator: case-insensitive, underscores replaced with spaces
-    let op = op_raw.trim().to_lowercase().replace('_', " ");
-
-    // helpers
-    fn to_f64(v: &V) -> Option<f64> {
-        match v {
-            V::Number(n) => n.as_f64(),
-            V::String(s) => s.parse::<f64>().ok(),
-            _ => None,
-        }
-    }
-    fn is_empty_val(v: &V) -> bool {
-        match v {
-            V::Null => true,
-            V::String(s) => s.trim().is_empty(),
-            V::Array(a) => a.is_empty(),
-            V::Object(m) => m.is_empty(),
-            _ => false,
-        }
-    }
-    fn json_equal(a: &V, b: &V) -> bool {
-        match (a, b) {
-            (V::Number(_), V::Number(_)) | (V::Number(_), V::String(_)) | (V::String(_), V::Number(_)) => {
-                match (to_f64(a), to_f64(b)) { (Some(x), Some(y)) => x == y, _ => a == b }
-            }
-            _ => a == b,
-        }
-    }
-    fn contains(left: &V, right: &V) -> bool {
-        match (left, right) {
-            (V::String(s), V::String(t)) => s.contains(t),
-            (V::Array(arr), r) => arr.iter().any(|x| json_equal(x, r)),
-            (V::Object(map), V::String(key)) => map.contains_key(key),
-            _ => false,
-        }
-    }
-    fn in_op(left: &V, right: &V) -> bool {
-        match right {
-            V::Array(arr) => arr.iter().any(|x| json_equal(left, x)),
-            V::Object(map) => match left { V::String(k) => map.contains_key(k), _ => false },
-            V::String(hay) => match left { V::String(needle) => hay.contains(needle), _ => false },
-            _ => false,
-        }
-    }
-    fn bool_like(v: &V) -> bool {
-        match v {
-            V::Bool(b) => *b,
-            V::Null => false,
-            V::Number(n) => n.as_f64().map(|x| x != 0.0).unwrap_or(false),
-            V::String(s) => {
-                let s_l = s.trim().to_lowercase();
-                if s_l == "true" { true } else if s_l == "false" { false } else { !s_l.is_empty() }
-            }
-            V::Array(a) => !a.is_empty(),
-            V::Object(m) => !m.is_empty(),
-        }
-    }
-
-    let res = match (op.as_str(), &lval, &rval) {
-        // equal / not equal (covers all JSON types; numbers compare as f64, everything else uses deep equality)
-        ("equal" | "equals" | "==" | "eq", l, Some(r)) => json_equal(l, r),
-        ("not equal" | "!=" | "not equals" | "neq", l, Some(r)) => !json_equal(l, r),
-
-        // numeric comparisons
-        ("greater than" | ">" | "gt", l, Some(r)) => match (to_f64(l), to_f64(r)) { (Some(a), Some(b)) => a > b, _ => false },
-        ("greater than or equal" | ">=" | "gte" | "ge", l, Some(r)) => match (to_f64(l), to_f64(r)) { (Some(a), Some(b)) => a >= b, _ => false },
-        ("less than" | "<" | "lt", l, Some(r)) => match (to_f64(l), to_f64(r)) { (Some(a), Some(b)) => a < b, _ => false },
-        ("less than or equal" | "<=" | "lte" | "le", l, Some(r)) => match (to_f64(l), to_f64(r)) { (Some(a), Some(b)) => a <= b, _ => false },
-
-        // contains / not contains (strings, arrays, object keys)
-        ("contains", l, Some(r)) => contains(l, r),
-        ("not contains", l, Some(r)) => !contains(l, r),
-
-        // membership: left in right / not in
-        ("in", l, Some(r)) => in_op(l, r),
-        ("not in" | "nin", l, Some(r)) => !in_op(l, r),
-
-        // empty / not empty (strings, arrays, objects, null)
-        ("is empty" | "empty" | "isempty", l, _) => is_empty_val(l),
-        ("is not empty" | "not empty" | "notempty", l, _) => !is_empty_val(l),
-
-        // boolean checks (coerce each type to a boolean)
-        ("is true" | "is true?" | "istrue", l, _) => bool_like(l),
-        ("is false" | "isfalse", l, _) => !bool_like(l),
-
-        _ => false,
-    };
-    Ok(res)
-}
-
-fn resolve_value(ctx: &serde_json::Value, v: &serde_json::Value) -> anyhow::Result<serde_json::Value> {
-    use serde_json::Value as V;
-    let t = v.get("type").and_then(|v| v.as_str()).unwrap_or("");
-    match t {
-        "constant" => Ok(v.get("content").cloned().unwrap_or(V::Null)),
-        "ref" => {
-            // content: [nodeId, field]
-            if let Some(arr) = v.get("content").and_then(|v| v.as_array()) {
-                if arr.len() >= 2 {
-                    if let (Some(node), Some(field)) = (arr[0].as_str(), arr[1].as_str()) {
-                        let val = ctx
-                            .get("nodes")
-                            .and_then(|n| n.get(node))
-                            .and_then(|m| m.get(field))
-                            .cloned()
-                            .or_else(|| ctx.get(field).cloned())
-                            .unwrap_or(V::Null);
-                        return Ok(val);
-                    }
-                }
-            }
-            Ok(V::Null)
-        }
-        "expression" => {
-            let expr = v.get("content").and_then(|x| x.as_str()).unwrap_or("");
-            if expr.trim().is_empty() { return Ok(V::Null); }
-            Ok(super::engine::eval_rhai_expr_json(expr, ctx).unwrap_or(V::Null))
-        }
-        _ => Ok(V::Null),
-    }
-}
-*/
+#[derive(Debug, Clone)]
+pub struct DriveError {
+    pub node_id: String,
+    pub ctx: serde_json::Value,
+    pub logs: Vec<String>,
+    pub message: String,
+}
+
+impl std::fmt::Display for DriveError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.message)
+    }
+}
+
+impl std::error::Error for DriveError {}
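DriveError carries the context snapshot and accumulated logs at the point of failure, wrapped into anyhow::Error by the engine. A caller that wants those details back (for example to persist a partial run log) can downcast; a minimal sketch, assuming engine.drive(...) is the call site:

    use crate::flow::context::DriveOptions;
    use crate::flow::domain::ChainDef;
    use crate::flow::engine::{DriveError, FlowEngine};

    // Sketch: run a chain and surface DriveError details on failure.
    async fn run_and_report(engine: &FlowEngine, chain: &ChainDef, input: serde_json::Value) {
        match engine.drive(chain, input, DriveOptions::default()).await {
            Ok((_ctx, logs)) => tracing::info!("flow ok, {} log lines", logs.len()),
            Err(err) => match err.downcast_ref::<DriveError>() {
                // DriveError keeps the ctx/log snapshot taken when the failing node returned an error.
                Some(de) => tracing::error!("node {} failed: {} ({} log lines)", de.node_id, de.message, de.logs.len()),
                None => tracing::error!("flow failed: {err}"),
            },
        }
    }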
backend/src/flow/executors/http.rs
@@ -2,8 +2,7 @@ use async_trait::async_trait;
 use serde_json::{Value, json, Map};
 use tracing::info;
 use std::collections::HashMap;
-use std::time::Duration;
-use reqwest::Certificate;
+use crate::middlewares::http_client::{execute_http, HttpClientOptions, HttpRequest};
 
 use crate::flow::task::Executor;
 use crate::flow::domain::{NodeDef, NodeId};
@@ -34,64 +33,29 @@ impl Executor for HttpTask {
             return Ok(());
         };
 
-        // 3) parse the config
+        // 3) parse the config -> convert it into the middleware's request parameters
         let (method, url, headers, query, body, opts) = parse_http_config(cfg)?;
         info!(target = "udmin.flow", "http task: {} {}", method, url);
 
-        // 4) send the request (supporting the HTTPS-related options)
-        let client = {
-            let mut builder = reqwest::Client::builder();
-            if let Some(ms) = opts.timeout_ms { builder = builder.timeout(Duration::from_millis(ms)); }
-            if opts.insecure { builder = builder.danger_accept_invalid_certs(true); }
-            if opts.http1_only { builder = builder.http1_only(); }
-            if let Some(pem) = opts.ca_pem {
-                if let Ok(cert) = Certificate::from_pem(pem.as_bytes()) {
-                    builder = builder.add_root_certificate(cert);
-                }
-            }
-            builder.build()?
-        };
-        let mut req = client.request(method.parse()?, url);
-
-        if let Some(hs) = headers {
-            use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
-            let mut map = HeaderMap::new();
-            for (k, v) in hs {
-                if let (Ok(name), Ok(value)) = (HeaderName::from_bytes(k.as_bytes()), HeaderValue::from_str(&v)) {
-                    map.insert(name, value);
-                }
-            }
-            req = req.headers(map);
-        }
-
-        if let Some(qs) = query {
-            // convert the query params into a (String, String) list so reqwest can serialize them
-            let mut pairs: Vec<(String, String)> = Vec::new();
-            for (k, v) in qs {
-                let s = match v {
-                    Value::String(s) => s,
-                    Value::Number(n) => n.to_string(),
-                    Value::Bool(b) => b.to_string(),
-                    other => other.to_string(),
-                };
-                pairs.push((k, s));
-            }
-            req = req.query(&pairs);
-        }
-
-        if let Some(b) = body { req = req.json(&b); }
-        let resp = req.send().await?;
-        let status = resp.status().as_u16();
-        let headers_out: Map<String, Value> = resp
-            .headers()
-            .iter()
-            .map(|(k, v)| (k.to_string(), Value::String(v.to_str().unwrap_or("").to_string())))
-            .collect();
-
-        // try to parse as JSON, otherwise fall back to plain text
-        let text = resp.text().await?;
-        let parsed_body: Value = serde_json::from_str(&text).unwrap_or_else(|_| Value::String(text));
+        let req = HttpRequest {
+            method,
+            url,
+            headers,
+            query,
+            body,
+        };
+        let client_opts = HttpClientOptions {
+            timeout_ms: opts.timeout_ms,
+            insecure: opts.insecure,
+            ca_pem: opts.ca_pem,
+            http1_only: opts.http1_only,
+        };
+
+        // 4) send the request through the middleware
+        let out = execute_http(req, client_opts).await?;
+        let status = out.status;
+        let headers_out = out.headers;
+        let parsed_body = out.body;
 
         // 5) write the result back into ctx
         let result = json!({
@@ -138,8 +102,15 @@ fn parse_http_config(cfg: Value) -> anyhow::Result<(
     let query = m.remove("query").and_then(|v| v.as_object().cloned());
     let body = m.remove("body");
 
-    // optional HTTPS / timeout / HTTP-version options
-    let timeout_ms = m.remove("timeout_ms").and_then(|v| v.as_u64());
+    // unified timeout parsing (inline): either a flat timeout_ms or a timeout object
+    let timeout_ms = if let Some(ms) = m.remove("timeout_ms").and_then(|v| v.as_u64()) {
+        Some(ms)
+    } else if let Some(Value::Object(mut to)) = m.remove("timeout") {
+        to.remove("timeout").and_then(|v| v.as_u64())
+    } else {
+        None
+    };
+
     let insecure = m.remove("insecure").and_then(|v| v.as_bool()).unwrap_or(false);
     let http1_only = m.remove("http1_only").and_then(|v| v.as_bool()).unwrap_or(false);
     let ca_pem = m.remove("ca_pem").and_then(|v| v.as_str().map(|s| s.to_string()));

backend/src/flow/executors/mod.rs
@@ -1,6 +1,5 @@
 pub mod http;
 pub mod db;
-// removed: pub mod expr;
 pub mod variable;
 pub mod script_rhai;
 pub mod script_js;

backend/src/flow/log_handler.rs (new file, 219 lines)
use async_trait::async_trait;
use chrono::{DateTime, FixedOffset};
use serde_json::Value;
use tokio::sync::mpsc::Sender;

use crate::flow::context::StreamEvent;
use crate::services::flow_run_log_service::{self, CreateRunLogInput};
use crate::db::Db;

/// Abstract interface for flow-run log handlers
#[async_trait]
pub trait FlowLogHandler: Send + Sync {
    /// Record that a flow run has started
    async fn log_start(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, operator: Option<(i64, String)>) -> anyhow::Result<()>;

    /// Record a failed flow run (error message only)
    async fn log_error(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()>;

    /// Record a failed flow run (including partial output and the accumulated logs)
    async fn log_error_detail(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        // default implementation: degrade to error-message-only logging
        self.log_error(flow_id, flow_code, input, error_msg, operator, started_at, duration_ms).await
    }

    /// Record a successful flow run
    async fn log_success(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()>;

    /// Push a node execution event (only the SSE implementation needs this)
    async fn emit_node_event(&self, _node_id: &str, _event_type: &str, _data: &Value) -> anyhow::Result<()> {
        // default no-op; the database log handler does not need it
        Ok(())
    }

    /// Push the completion event (only the SSE implementation needs this)
    async fn emit_done(&self, _success: bool, _output: &Value, _logs: &[String]) -> anyhow::Result<()> {
        // default no-op; the database log handler does not need it
        Ok(())
    }
}

/// Database log handler
pub struct DatabaseLogHandler {
    db: Db,
}

impl DatabaseLogHandler {
    pub fn new(db: Db) -> Self {
        Self { db }
    }
}

#[async_trait]
impl FlowLogHandler for DatabaseLogHandler {
    async fn log_start(&self, _flow_id: &str, _flow_code: Option<&str>, _input: &Value, _operator: Option<(i64, String)>) -> anyhow::Result<()> {
        // the database log handler does not record a start event; it only writes a record at the end
        Ok(())
    }

    async fn log_error(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: None,
            ok: false,
            logs: Some(error_msg.to_string()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create error log: {}", e))?;
        Ok(())
    }

    async fn log_error_detail(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        // append error_msg to the end of the logs (if the last entry differs) so the record carries a clear error description without duplicates
        let mut all_logs = logs.to_vec();
        if all_logs.last().map(|s| s != error_msg).unwrap_or(true) {
            all_logs.push(error_msg.to_string());
        }
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: Some(serde_json::to_string(output).unwrap_or_default()),
            ok: false,
            logs: Some(serde_json::to_string(&all_logs).unwrap_or_default()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create error log with details: {}", e))?;
        Ok(())
    }

    async fn log_success(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: Some(serde_json::to_string(output).unwrap_or_default()),
            ok: true,
            logs: Some(serde_json::to_string(logs).unwrap_or_default()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create success log: {}", e))?;
        Ok(())
    }
}

/// SSE log handler
pub struct SseLogHandler {
    db: Db,
    event_tx: Sender<StreamEvent>,
}

impl SseLogHandler {
    pub fn new(db: Db, event_tx: Sender<StreamEvent>) -> Self {
        Self { db, event_tx }
    }
}

#[async_trait]
impl FlowLogHandler for SseLogHandler {
    async fn log_start(&self, _flow_id: &str, _flow_code: Option<&str>, _input: &Value, _operator: Option<(i64, String)>) -> anyhow::Result<()> {
        // the SSE handler does not record a start event either
        Ok(())
    }

    async fn log_error(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        // push the SSE error event first (done is not sent here; the caller sends it together with ctx/logs)
        crate::middlewares::sse::emit_error(&self.event_tx, error_msg.to_string()).await;

        // then write to the database (error message only)
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: None,
            ok: false,
            logs: Some(error_msg.to_string()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create error log: {}", e))?;
        Ok(())
    }

    async fn log_error_detail(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], error_msg: &str, operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        // push the SSE error event first (done is not sent here; the caller sends it together with ctx/logs)
        crate::middlewares::sse::emit_error(&self.event_tx, error_msg.to_string()).await;

        // then write to the database (with partial output and the accumulated logs), avoiding a duplicate error entry
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        let mut all_logs = logs.to_vec();
        if all_logs.last().map(|s| s != error_msg).unwrap_or(true) {
            all_logs.push(error_msg.to_string());
        }
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: Some(serde_json::to_string(output).unwrap_or_default()),
            ok: false,
            logs: Some(serde_json::to_string(&all_logs).unwrap_or_default()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create error log with details: {}", e))?;
        Ok(())
    }

    async fn log_success(&self, flow_id: &str, flow_code: Option<&str>, input: &Value, output: &Value, logs: &[String], operator: Option<(i64, String)>, started_at: DateTime<FixedOffset>, duration_ms: i64) -> anyhow::Result<()> {
        // push the SSE done event first
        crate::middlewares::sse::emit_done(&self.event_tx, true, output.clone(), logs.to_vec()).await;

        // then write to the database
        let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
        flow_run_log_service::create(&self.db, CreateRunLogInput {
            flow_id: flow_id.to_string(),
            flow_code: flow_code.map(|s| s.to_string()),
            input: Some(serde_json::to_string(input).unwrap_or_default()),
            output: Some(serde_json::to_string(output).unwrap_or_default()),
            ok: true,
            logs: Some(serde_json::to_string(logs).unwrap_or_default()),
            user_id,
            username,
            started_at,
            duration_ms,
        }).await.map_err(|e| anyhow::anyhow!("Failed to create success log: {}", e))?;
        Ok(())
    }

    async fn emit_node_event(&self, node_id: &str, event_type: &str, data: &Value) -> anyhow::Result<()> {
        // push the node event over SSE
        let event = StreamEvent::Node {
            node_id: node_id.to_string(),
            logs: vec![event_type.to_string()],
            ctx: data.clone(),
        };
        if let Err(_e) = self.event_tx.send(event).await {
            // the channel may already be closed; ignore the error
        }
        Ok(())
    }

    async fn emit_done(&self, success: bool, output: &Value, logs: &[String]) -> anyhow::Result<()> {
        crate::middlewares::sse::emit_done(&self.event_tx, success, output.clone(), logs.to_vec()).await;
        Ok(())
    }
}
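FlowLogHandler lets the same engine-driving code either only persist the run (normal execution) or additionally stream events (SSE execution). A rough sketch of choosing a handler at the call site; `db` and `event_tx` are assumed to come from the surrounding request handler, and this is not the project's actual wiring code:

    use std::sync::Arc;
    use crate::flow::context::StreamEvent;
    use crate::flow::log_handler::{DatabaseLogHandler, FlowLogHandler, SseLogHandler};

    // Sketch: pick the log handler depending on whether the client asked for streaming.
    fn pick_handler(
        db: crate::db::Db,
        event_tx: Option<tokio::sync::mpsc::Sender<StreamEvent>>,
    ) -> Arc<dyn FlowLogHandler> {
        match event_tx {
            // Streaming run: push SSE events and persist the run log.
            Some(tx) => Arc::new(SseLogHandler::new(db, tx)),
            // Plain run: only persist the run log.
            None => Arc::new(DatabaseLogHandler::new(db)),
        }
    }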
backend/src/flow/mappers.rs
@@ -1,6 +1,6 @@
 use serde_json::Value;
 
 // Extract the HTTP config from a node: method, url, headers, query, body
 pub fn extract_http_cfg(n: &Value) -> Option<Value> {
     let data = n.get("data");
     let api = data.and_then(|d| d.get("api"));
@@ -28,7 +28,7 @@ pub fn extract_http_cfg(n: &Value) -> Option<Value> {
     http_obj.insert("method".into(), Value::String(method));
     http_obj.insert("url".into(), Value::String(url));
 
     // Optional: headers
     if let Some(hs) = api.and_then(|a| a.get("headers")).and_then(|v| v.as_object()) {
         let mut heads = serde_json::Map::new();
         for (k, v) in hs.iter() {
@@ -41,7 +41,7 @@ pub fn extract_http_cfg(n: &Value) -> Option<Value> {
         }
     }
 
     // Optional: query
     if let Some(qs) = api.and_then(|a| a.get("query")).and_then(|v| v.as_object()) {
         let mut query = serde_json::Map::new();
         for (k, v) in qs.iter() {
@@ -52,7 +52,7 @@ pub fn extract_http_cfg(n: &Value) -> Option<Value> {
         }
     }
 
     // Optional: body
     if let Some(body_obj) = data.and_then(|d| d.get("body")).and_then(|v| v.as_object()) {
         if let Some(Value::Object(json_body)) = body_obj.get("json") {
             http_obj.insert("body".into(), Value::Object(json_body.clone()));
@@ -61,5 +61,28 @@ pub fn extract_http_cfg(n: &Value) -> Option<Value> {
         }
     }
 
+    // Optional: timeout (handled uniformly: a number or an object)
+    if let Some(to_val) = data.and_then(|d| d.get("timeout")) {
+        match to_val {
+            Value::Number(n) => {
+                http_obj.insert("timeout_ms".into(), Value::Number(n.clone()));
+            }
+            Value::Object(obj) => {
+                // read the object's fields without mutating it and normalize them
+                let mut t = serde_json::Map::new();
+                if let Some(ms) = obj.get("timeout").and_then(|v| v.as_u64()) {
+                    t.insert("timeout".into(), Value::Number(serde_json::Number::from(ms)));
+                }
+                if let Some(rt) = obj.get("retryTimes").and_then(|v| v.as_u64()) {
+                    t.insert("retryTimes".into(), Value::Number(serde_json::Number::from(rt)));
+                }
+                if !t.is_empty() {
+                    http_obj.insert("timeout".into(), Value::Object(t));
+                }
+            }
+            _ => {}
+        }
+    }
+
     Some(Value::Object(http_obj))
 }

backend/src/flow/mod.rs
@@ -3,6 +3,6 @@ pub mod context;
 pub mod task;
 pub mod engine;
 pub mod dsl;
-// removed: pub mod storage;
 pub mod executors;
 pub mod mappers;
+pub mod log_handler;

backend/src/middlewares/http_client.rs (new file, 178 lines)
@ -0,0 +1,178 @@
|
|||||||
|
use std::collections::HashMap;
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use anyhow::Result;
|
||||||
|
use reqwest::Certificate;
|
||||||
|
use serde_json::{Map, Value};
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Default)]
|
||||||
|
pub struct HttpClientOptions {
|
||||||
|
pub timeout_ms: Option<u64>,
|
||||||
|
pub insecure: bool,
|
||||||
|
pub ca_pem: Option<String>,
|
||||||
|
pub http1_only: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Default)]
|
||||||
|
pub struct HttpRequest {
|
||||||
|
pub method: String,
|
||||||
|
pub url: String,
|
||||||
|
pub headers: Option<HashMap<String, String>>, // header values are strings
|
||||||
|
pub query: Option<Map<String, Value>>, // query values will be stringified
|
||||||
|
pub body: Option<Value>, // json body
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct HttpResponse {
|
||||||
|
pub status: u16,
|
||||||
|
pub headers: Map<String, Value>,
|
||||||
|
pub body: Value,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn execute_http(req: HttpRequest, opts: HttpClientOptions) -> Result<HttpResponse> {
|
||||||
|
// Build client with options
|
||||||
|
let mut builder = reqwest::Client::builder();
|
||||||
|
if let Some(ms) = opts.timeout_ms {
|
||||||
|
builder = builder.timeout(Duration::from_millis(ms));
|
||||||
|
}
|
||||||
|
if opts.insecure {
|
||||||
|
builder = builder.danger_accept_invalid_certs(true);
|
||||||
|
}
|
||||||
|
if opts.http1_only {
|
||||||
|
builder = builder.http1_only();
|
||||||
|
}
|
||||||
|
if let Some(pem) = opts.ca_pem {
|
||||||
|
if let Ok(cert) = Certificate::from_pem(pem.as_bytes()) {
|
||||||
|
builder = builder.add_root_certificate(cert);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let client = builder.build()?;
|
||||||
|
|
||||||
|
// Build request
|
||||||
|
let mut rb = client.request(req.method.parse()?, req.url);
|
||||||
|
|
||||||
|
    // Also set per-request timeout to ensure it takes effect in all cases
    if let Some(ms) = opts.timeout_ms {
        rb = rb.timeout(Duration::from_millis(ms));
    }

    if let Some(hs) = req.headers {
        use reqwest::header::{HeaderMap, HeaderName, HeaderValue};
        let mut map = HeaderMap::new();
        for (k, v) in hs {
            if let (Ok(name), Ok(value)) = (HeaderName::from_bytes(k.as_bytes()), HeaderValue::from_str(&v)) {
                map.insert(name, value);
            }
        }
        rb = rb.headers(map);
    }

    if let Some(qs) = req.query {
        let mut pairs: Vec<(String, String)> = Vec::new();
        for (k, v) in qs {
            let s = match v {
                Value::String(s) => s,
                Value::Number(n) => n.to_string(),
                Value::Bool(b) => b.to_string(),
                other => other.to_string(),
            };
            pairs.push((k, s));
        }
        rb = rb.query(&pairs);
    }

    if let Some(b) = req.body {
        rb = rb.json(&b);
    }

    let resp = rb.send().await?;
    let status = resp.status().as_u16();
    let headers_out: Map<String, Value> = resp
        .headers()
        .iter()
        .map(|(k, v)| (k.to_string(), Value::String(v.to_str().unwrap_or("").to_string())))
        .collect();

    let text = resp.text().await?;
    let parsed_body: Value = serde_json::from_str(&text).unwrap_or_else(|_| Value::String(text));

    Ok(HttpResponse {
        status,
        headers: headers_out,
        body: parsed_body,
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use wiremock::matchers::{method, path};
    use wiremock::{Mock, MockServer, ResponseTemplate};

    #[tokio::test]
    async fn test_get_success() {
        let server = MockServer::start().await;
        let body = serde_json::json!({"ok": true});
        Mock::given(method("GET"))
            .and(path("/hello"))
            .respond_with(ResponseTemplate::new(200).set_body_json(body.clone()))
            .mount(&server)
            .await;

        let req = HttpRequest {
            method: "GET".into(),
            url: format!("{}/hello", server.uri()),
            ..Default::default()
        };
        let opts = HttpClientOptions::default();
        let resp = execute_http(req, opts).await.unwrap();
        assert_eq!(resp.status, 200);
        assert_eq!(resp.body, body);
    }

    #[tokio::test]
    async fn test_post_json() {
        let server = MockServer::start().await;
        let input = serde_json::json!({"name": "udmin"});
        Mock::given(method("POST")).and(path("/echo"))
            .respond_with(|req: &wiremock::Request| {
                // Echo back the request body as JSON
                let body = serde_json::from_slice::<Value>(&req.body).unwrap_or(Value::Null);
                ResponseTemplate::new(201).set_body_json(body)
            })
            .mount(&server)
            .await;

        let req = HttpRequest {
            method: "POST".into(),
            url: format!("{}/echo", server.uri()),
            body: Some(input.clone()),
            ..Default::default()
        };
        let opts = HttpClientOptions::default();
        let resp = execute_http(req, opts).await.unwrap();
        assert_eq!(resp.status, 201);
        assert_eq!(resp.body, input);
    }

    #[tokio::test]
    async fn test_timeout() {
        let server = MockServer::start().await;
        // Delay longer than our timeout
        Mock::given(method("GET"))
            .and(path("/slow"))
            .respond_with(ResponseTemplate::new(200).set_delay(Duration::from_millis(200)))
            .mount(&server)
            .await;

        let req = HttpRequest { method: "GET".into(), url: format!("{}/slow", server.uri()), ..Default::default() };
        let opts = HttpClientOptions { timeout_ms: Some(50), ..Default::default() };
        let err = execute_http(req, opts).await.unwrap_err();
        // Try to verify it's a timeout error from reqwest
        let is_timeout = err
            .downcast_ref::<reqwest::Error>()
            .map(|e| e.is_timeout())
            .unwrap_or(false);
        assert!(is_timeout, "expected timeout error, got: {err}");
    }
}
@ -1,2 +1,4 @@
pub mod jwt;
pub mod logging;
+pub mod sse;
+pub mod http_client;

75  backend/src/middlewares/sse.rs  Normal file
@ -0,0 +1,75 @@
use axum::response::sse::{Event, KeepAlive, Sse};
use futures::Stream;
use std::convert::Infallible;
use std::time::Duration;
use tokio_stream::{wrappers::ReceiverStream, StreamExt as _};

// Bring in the backend streaming event type
use crate::flow::context::StreamEvent;

// New: logging and timestamps
use tracing::info;
use chrono::Utc;

/// Wrap an mpsc::Receiver<T> as an SSE response, where T: Serialize
/// - Each event is serialized to JSON and written as a `data:` line
/// - A keep-alive is attached so long-lived connections do not time out
pub fn from_mpsc<T>(rx: tokio::sync::mpsc::Receiver<T>) -> Sse<impl Stream<Item = Result<Event, Infallible>>>
where
    T: serde::Serialize + Send + 'static,
{
    let stream = ReceiverStream::new(rx).map(|evt| {
        let payload = serde_json::to_string(&evt).unwrap_or_else(|_| "{}".to_string());
        // Key log: record the moment each event is mapped to an SSE frame (i.e. about to be written to the response stream)
        info!(target: "udmin.sse", ts = %Utc::now().to_rfc3339(), payload_len = payload.len(), "sse send");
        Ok::<Event, Infallible>(Event::default().data(payload))
    });

    Sse::new(stream).keep_alive(KeepAlive::new().interval(Duration::from_secs(10)).text("keep-alive"))
}

/// Unified sender: node event
pub async fn emit_node(
    tx: &tokio::sync::mpsc::Sender<StreamEvent>,
    node_id: impl Into<String>,
    logs: Vec<String>,
    ctx: serde_json::Value,
) {
    let nid = node_id.into();
    // Log the time the event is enqueued
    info!(target: "udmin.sse", kind = "node", node_id = %nid, logs_len = logs.len(), ts = %Utc::now().to_rfc3339(), "enqueue event");
    let _ = tx
        .send(StreamEvent::Node {
            node_id: nid,
            logs,
            ctx,
        })
        .await;
}

/// Unified sender: completion event
pub async fn emit_done(
    tx: &tokio::sync::mpsc::Sender<StreamEvent>,
    ok: bool,
    ctx: serde_json::Value,
    logs: Vec<String>,
) {
    info!(target: "udmin.sse", kind = "done", ok = ok, logs_len = logs.len(), ts = %Utc::now().to_rfc3339(), "enqueue event");
    let _ = tx
        .send(StreamEvent::Done { ok, ctx, logs })
        .await;
}

/// Unified sender: error event
pub async fn emit_error(
    tx: &tokio::sync::mpsc::Sender<StreamEvent>,
    message: impl Into<String>,
) {
    let msg = message.into();
    info!(target: "udmin.sse", kind = "error", message = %msg, ts = %Utc::now().to_rfc3339(), "enqueue event");
    let _ = tx
        .send(StreamEvent::Error {
            message: msg,
        })
        .await;
}
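For reference, what the browser receives from this endpoint is one JSON object per `data:` line, plus the periodic `keep-alive` comment configured above. The sketch below is illustrative only: the field values are invented, and the lowercase `type` tag is assumed from the frontend `StreamEvent` union that this commit adds.

```typescript
// Illustrative frames only (not part of the commit); field values are made up.
const sampleFrames = [
  'data: {"type":"node","node_id":"http_1","logs":["GET /users -> 200"],"ctx":{"nodes":{"http_1":{"status":200}}}}',
  'data: {"type":"done","ok":true,"ctx":{},"logs":[]}',
];
for (const frame of sampleFrames) {
  // Each frame carries one serialized StreamEvent after the `data: ` prefix.
  const evt = JSON.parse(frame.slice('data: '.length));
  console.log(evt.type); // "node", then "done"
}
```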
@ -4,11 +4,16 @@ use serde::Deserialize;
use tracing::{info, error};
use crate::middlewares::jwt::AuthUser;

+// New: bring in the shared SSE component
+use crate::middlewares::sse;
+
pub fn router() -> Router<Db> {
    Router::new()
        .route("/flows", post(create).get(list))
        .route("/flows/{id}", get(get_one).put(update).delete(remove))
        .route("/flows/{id}/run", post(run))
+        // New: streaming run (SSE) endpoint
+        .route("/flows/{id}/run/stream", post(run_stream))
}

#[derive(Deserialize)]
@ -83,3 +88,22 @@ async fn run(State(db): State<Db>, user: AuthUser, Path(id): Path<String>, Json(
        }
    }
}
+
+// New: SSE streaming run endpoint; the request body reuses RunReq (input only)
+async fn run_stream(State(db): State<Db>, user: AuthUser, Path(id): Path<String>, Json(req): Json<RunReq>) -> Result<axum::response::sse::Sse<impl futures::Stream<Item = Result<axum::response::sse::Event, std::convert::Infallible>>>, AppError> {
+    // Create an mpsc channel to receive streaming events from the engine
+    let (tx, rx) = tokio::sync::mpsc::channel::<crate::flow::context::StreamEvent>(16);
+
+    // Spawn a background task to run the flow, sending events through tx
+    let db_clone = db.clone();
+    let id_clone = id.clone();
+    let input = req.input.clone();
+    let user_info = Some((user.uid, user.username));
+    tokio::spawn(async move {
+        // Reuse most of flow_service::run's logic, injecting event_tx via DriveOptions
+        let _ = flow_service::run_with_stream(db_clone, &id_clone, flow_service::RunReq { input }, user_info, tx).await;
+    });
+
+    // Let the shared component wrap the Receiver into an SSE response
+    Ok(sse::from_mpsc(rx))
+}
@ -4,7 +4,7 @@ use anyhow::Context as _;
use serde::{Deserialize, Serialize};

use crate::error::AppError;
-use crate::flow::{self, dsl::FlowDSL, engine::FlowEngine, context::{DriveOptions, ExecutionMode}};
+use crate::flow::{self, dsl::FlowDSL, engine::FlowEngine, context::{DriveOptions, ExecutionMode, StreamEvent}, log_handler::{FlowLogHandler, DatabaseLogHandler, SseLogHandler}};
use crate::db::Db;
use crate::models::flow as db_flow;
use crate::models::request_log; // New: query the most recent editor
@ -14,6 +14,10 @@ use sea_orm::{EntityTrait, ActiveModelTrait, Set, DbErr, ColumnTrait, QueryFilte
use sea_orm::entity::prelude::DateTimeWithTimeZone; // New: datetime type
use chrono::{Utc, FixedOffset};
use tracing::{info, error};
+// New: channel for streaming events
+use tokio::sync::mpsc::Sender;
+// New: carry partial context and logs alongside errors
+use crate::flow::engine::DriveError;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FlowSummary {
@ -197,186 +201,152 @@ pub async fn delete(db: &Db, id: &str) -> anyhow::Result<()> {
}

pub async fn run(db: &Db, id: &str, req: RunReq, operator: Option<(i64, String)>) -> anyhow::Result<RunResult> {
-    info!(target = "udmin", "flow.run: start id={}", id);
+    let log_handler = DatabaseLogHandler::new(db.clone());
+    match run_internal(db, id, req, operator, &log_handler, None).await {
+        Ok((ctx, logs)) => Ok(RunResult { ok: true, ctx, logs }),
+        Err(e) => {
+            // Convert runtime errors into ok=false, carrying partial ctx/logs where possible
+            if let Some(de) = e.downcast_ref::<DriveError>().cloned() {
+                Ok(RunResult { ok: false, ctx: de.ctx, logs: de.logs })
+            } else {
+                let mut full = e.to_string();
+                for cause in e.chain().skip(1) {
+                    full.push_str(" | ");
+                    full.push_str(&cause.to_string());
+                }
+                Ok(RunResult { ok: false, ctx: serde_json::json!({}), logs: vec![full] })
+            }
+        }
+    }
+}
+
+// New: streaming run that emits node events and a final completion event
+pub async fn run_with_stream(
+    db: Db,
+    id: &str,
+    req: RunReq,
+    operator: Option<(i64, String)>,
+    event_tx: Sender<StreamEvent>,
+) -> anyhow::Result<()> {
+    // Keep a clone so a done event can still be sent on error
+    let tx_done = event_tx.clone();
+    let log_handler = SseLogHandler::new(db.clone(), event_tx.clone());
+    match run_internal(&db, id, req, operator, &log_handler, Some(event_tx)).await {
+        Ok((_ctx, _logs)) => Ok(()), // Happy path: log_success already sent done(true, ...)
+        Err(e) => {
+            // Error path: log_error already sent the error event; send done(false, ...) here
+            if let Some(de) = e.downcast_ref::<DriveError>().cloned() {
+                crate::middlewares::sse::emit_done(&tx_done, false, de.ctx, de.logs).await;
+            } else {
+                let mut full = e.to_string();
+                for cause in e.chain().skip(1) { full.push_str(" | "); full.push_str(&cause.to_string()); }
+                crate::middlewares::sse::emit_done(&tx_done, false, serde_json::json!({}), vec![full]).await;
+            }
+            Ok(())
+        }
+    }
+}
+
+// Shared internal run implementation
+async fn run_internal(
+    db: &Db,
+    id: &str,
+    req: RunReq,
+    operator: Option<(i64, String)>,
+    log_handler: &dyn FlowLogHandler,
+    event_tx: Option<Sender<StreamEvent>>,
+) -> anyhow::Result<(serde_json::Value, Vec<String>)> {
+    // Use the event_tx passed in (provided by the route layer when SSE is enabled)
+    info!(target = "udmin", "flow.run_internal: start id={}", id);
    let start = Utc::now().with_timezone(&FixedOffset::east_opt(0).unwrap());
-    // Fetch the flow code so it can be written into the run log
+    // Fetch the flow code
    let flow_code: Option<String> = match db_flow::Entity::find_by_id(id.to_string()).one(db).await {
        Ok(Some(row)) => row.code,
        _ => None,
    };
-    // Fetch the flow document and record the failure reason
+    // Fetch the flow document
    let doc = match get(db, id).await {
        Ok(d) => d,
        Err(e) => {
-            error!(target = "udmin", error = ?e, "flow.run: get doc failed id={}", id);
+            error!(target = "udmin", error = ?e, "flow.run_internal: get doc failed id={}", id);
-            // Record the failure log
-            let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
-            let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                flow_id: id.to_string(),
-                flow_code: flow_code.clone(),
-                input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                output: None,
-                ok: false,
-                logs: Some(format!("get doc failed: {}", e)),
-                user_id,
-                username,
-                started_at: start,
-                duration_ms: 0,
-            }).await;
+            let error_msg = format!("get doc failed: {}", e);
+            log_handler.log_error(id, flow_code.as_deref(), &req.input, &error_msg, operator, start, 0).await?;
            return Err(e);
        }
    };

-    // Log basic document info to tell whether the JSON or the YAML path is taken
-    info!(target = "udmin", "flow.run: doc loaded id={} has_design_json={} yaml_len={}", id, doc.design_json.is_some(), doc.yaml.len());
+    info!(target = "udmin", "flow.run_internal: doc loaded id={} has_design_json={} yaml_len={}", id, doc.design_json.is_some(), doc.yaml.len());

-    // Prefer design_json if present; otherwise fall back to YAML
+    // Build the chain and ctx
    let mut exec_mode: ExecutionMode = ExecutionMode::Sync;
    let (mut chain, mut ctx) = if let Some(design) = &doc.design_json {
-        info!(target = "udmin", "flow.run: building chain from design_json id={}", id);
        let chain_from_json = match flow::dsl::chain_from_design_json(design) {
            Ok(c) => c,
            Err(e) => {
-                error!(target = "udmin", error = ?e, "flow.run: build chain from design_json failed id={}", id);
+                error!(target = "udmin", error = ?e, "flow.run_internal: build chain from design_json failed id={}", id);
-                // Record the failure log
-                let (user_id, username) = operator.as_ref().map(|(u, n)| (Some(*u), Some(n.clone()))).unwrap_or((None, None));
-                let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                    flow_id: id.to_string(),
-                    flow_code: flow_code.clone(),
-                    input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                    output: None,
-                    ok: false,
-                    logs: Some(format!("build chain from design_json failed: {}", e)),
-                    user_id,
-                    username,
-                    started_at: start,
-                    duration_ms: 0,
-                }).await;
+                let error_msg = format!("build chain from design_json failed: {}", e);
+                log_handler.log_error(id, flow_code.as_deref(), &req.input, &error_msg, operator, start, 0).await?;
                return Err(e);
            }
        };
        let mut ctx = req.input.clone();
-        // Merge node-scoped configs into ctx under ctx.nodes
        let supplement = flow::mappers::ctx_from_design_json(design);
        merge_json(&mut ctx, &supplement);
-        // Parse executionMode / execution_mode
        let mode_str = design.get("executionMode").and_then(|v| v.as_str())
            .or_else(|| design.get("execution_mode").and_then(|v| v.as_str()))
            .unwrap_or("sync");
        exec_mode = parse_execution_mode(mode_str);
-        info!(target = "udmin", "flow.run: ctx prepared from design_json id={} execution_mode={:?}", id, exec_mode);
        (chain_from_json, ctx)
    } else {
-        info!(target = "udmin", "flow.run: parsing YAML id={}", id);
        let dsl = match serde_yaml::from_str::<FlowDSL>(&doc.yaml) {
            Ok(d) => d,
            Err(e) => {
-                error!(target = "udmin", error = ?e, "flow.run: parse YAML failed id={}", id);
+                error!(target = "udmin", error = ?e, "flow.run_internal: parse YAML failed id={}", id);
-                // Record the failure log
-                let (user_id, username) = operator.as_ref().map(|(u, n)| (Some(*u), Some(n.clone()))).unwrap_or((None, None));
-                let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                    flow_id: id.to_string(),
-                    flow_code: flow_code.clone(),
-                    input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                    output: None,
-                    ok: false,
-                    logs: Some(format!("parse YAML failed: {}", e)),
-                    user_id,
-                    username,
-                    started_at: start,
-                    duration_ms: 0,
-                }).await;
+                let error_msg = format!("parse YAML failed: {}", e);
+                log_handler.log_error(id, flow_code.as_deref(), &req.input, &error_msg, operator, start, 0).await?;
                return Err(anyhow::Error::new(e).context("invalid flow yaml"));
            }
        };
-        // Read the execution mode from YAML
        if let Some(m) = dsl.execution_mode.as_deref() { exec_mode = parse_execution_mode(m); }
        (dsl.into(), req.input.clone())
    };

-    // If the chain built from design_json is empty, fall back to YAML
+    // Fallback
    if chain.nodes.is_empty() {
-        info!(target = "udmin", "flow.run: empty chain from design_json, fallback to YAML id={}", id);
        if !doc.yaml.trim().is_empty() {
            match serde_yaml::from_str::<FlowDSL>(&doc.yaml) {
                Ok(dsl) => {
                    chain = dsl.clone().into();
-                    // In the YAML branch ctx = req.input, without the design_json supplement
                    ctx = req.input.clone();
                    if let Some(m) = dsl.execution_mode.as_deref() { exec_mode = parse_execution_mode(m); }
-                    info!(target = "udmin", "flow.run: fallback YAML parsed id={} execution_mode={:?}", id, exec_mode);
                }
                Err(e) => {
-                    error!(target = "udmin", error = ?e, "flow.run: fallback parse YAML failed id={}", id);
-                    // Keep the empty chain; drive would fail again later, but return a clearer error here first
-                    let (user_id, username) = operator.as_ref().map(|(u, n)| (Some(*u), Some(n.clone()))).unwrap_or((None, None));
-                    let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                        flow_id: id.to_string(),
-                        flow_code: flow_code.clone(),
-                        input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                        output: None,
-                        ok: false,
-                        logs: Some(format!("fallback parse YAML failed: {}", e)),
-                        user_id,
-                        username,
-                        started_at: start,
-                        duration_ms: 0,
-                    }).await;
+                    let error_msg = format!("fallback parse YAML failed: {}", e);
+                    log_handler.log_error(id, flow_code.as_deref(), &req.input, &error_msg, operator, start, 0).await?;
                    return Err(anyhow::anyhow!("empty chain: design_json produced no nodes and YAML parse failed"));
                }
            }
        } else {
-            // The YAML is empty as well
-            let (user_id, username) = operator.as_ref().map(|(u, n)| (Some(*u), Some(n.clone()))).unwrap_or((None, None));
-            let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                flow_id: id.to_string(),
-                flow_code: flow_code.clone(),
-                input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                output: None,
-                ok: false,
-                logs: Some("empty chain: both design_json and yaml are empty".to_string()),
-                user_id,
-                username,
-                started_at: start,
-                duration_ms: 0,
-            }).await;
-            return Err(anyhow::anyhow!("empty chain: both design_json and yaml are empty"));
+            let error_msg = "empty chain: both design_json and yaml are empty";
+            log_handler.log_error(id, flow_code.as_deref(), &req.input, error_msg, operator, start, 0).await?;
+            return Err(anyhow::anyhow!(error_msg));
        }
    }

-    // Get tasks from the global registry (falls back to the default registry if uninitialized)
+    // Tasks and engine
    let tasks: flow::task::TaskRegistry = flow::task::get_registry();
    let engine = FlowEngine::builder().tasks(tasks).build();

-    info!(target = "udmin", "flow.run: driving engine id={} nodes={} links={} execution_mode={:?}", id, chain.nodes.len(), chain.links.len(), exec_mode);
    // Drive the engine
    let drive_res = engine
-        .drive(&chain, ctx, DriveOptions { execution_mode: exec_mode.clone(), ..Default::default() })
+        .drive(&chain, ctx, DriveOptions { execution_mode: exec_mode.clone(), event_tx, ..Default::default() })
        .await;
-    let (ctx, logs) = match drive_res {
-        Ok(r) => r,
-        Err(e) => {
-            error!(target = "udmin", error = ?e, "flow.run: engine drive failed id={}", id);
-            let dur = (Utc::now().with_timezone(&FixedOffset::east_opt(0).unwrap()) - start).num_milliseconds() as i64;
-            let (user_id, username) = operator.as_ref().map(|(u, n)| (Some(*u), Some(n.clone()))).unwrap_or((None, None));
-            let _ = flow_run_log_service::create(db, CreateRunLogInput {
-                flow_id: id.to_string(),
-                flow_code: flow_code.clone(),
-                input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-                output: None,
-                ok: false,
-                logs: Some(format!("engine drive failed: {}", e)),
-                user_id,
-                username,
-                started_at: start,
-                duration_ms: dur,
-            }).await;
-            return Err(e);
-        }
-    };
-
-    // Fallback removal of variable nodes: do not expose variable_* configs in the final context
-    let mut ctx = ctx;
+    match drive_res {
+        Ok((mut ctx, logs)) => {
+            // Remove variable nodes
            if let serde_json::Value::Object(map) = &mut ctx {
                if let Some(serde_json::Value::Object(nodes)) = map.get_mut("nodes") {
                    let keys: Vec<String> = nodes
@ -386,27 +356,45 @@ pub async fn run(db: &Db, id: &str, req: RunReq, operator: Option<(i64, String)>
                    for k in keys { nodes.remove(&k); }
                }
            }

-    // Debug: print the processed ctx
-    //info!(target = "udmin", "flow.run: result ctx={}", serde_json::to_string(&ctx).unwrap_or_else(|_| "<serialize ctx failed>".to_string()));
-
-    info!(target = "udmin", "flow.run: done id={}", id);
-    // Write the success log
            let dur = (Utc::now().with_timezone(&FixedOffset::east_opt(0).unwrap()) - start).num_milliseconds() as i64;
-    let (user_id, username) = operator.map(|(u, n)| (Some(u), Some(n))).unwrap_or((None, None));
-    let _ = flow_run_log_service::create(db, CreateRunLogInput {
-        flow_id: id.to_string(),
-        flow_code: flow_code.clone(),
-        input: Some(serde_json::to_string(&req.input).unwrap_or_default()),
-        output: Some(serde_json::to_string(&ctx).unwrap_or_default()),
-        ok: true,
-        logs: Some(serde_json::to_string(&logs).unwrap_or_default()),
-        user_id,
-        username,
-        started_at: start,
-        duration_ms: dur,
-    }).await;
-    Ok(RunResult { ok: true, ctx, logs })
+            log_handler.log_success(id, flow_code.as_deref(), &req.input, &ctx, &logs, operator, start, dur).await?;
+            Ok((ctx, logs))
+        }
+        Err(e) => {
+            error!(target = "udmin", error = ?e, "flow.run_internal: engine drive failed id={}", id);
+            let dur = (Utc::now().with_timezone(&FixedOffset::east_opt(0).unwrap()) - start).num_milliseconds() as i64;
+            // Prefer the detailed error record (includes partial ctx and accumulated logs)
+            if let Some(de) = e.downcast_ref::<DriveError>().cloned() {
+                log_handler
+                    .log_error_detail(
+                        id,
+                        flow_code.as_deref(),
+                        &req.input,
+                        &de.ctx,
+                        &de.logs,
+                        &de.message,
+                        operator,
+                        start,
+                        dur,
+                    )
+                    .await?;
+            } else {
+                let error_msg = format!("engine drive failed: {}", e);
+                log_handler
+                    .log_error(
+                        id,
+                        flow_code.as_deref(),
+                        &req.input,
+                        &error_msg,
+                        operator,
+                        start,
+                        dur,
+                    )
+                    .await?;
+            }
+            Err(e)
+        }
+    }
}

fn extract_name(yaml: &str) -> Option<String> {
@ -1,6 +1,6 @@
pub fn hash_password(plain: &str) -> anyhow::Result<String> {
-    use argon2::{password_hash::{SaltString, PasswordHasher}, Argon2};
+    use argon2::{password_hash::{SaltString, PasswordHasher, rand_core::OsRng}, Argon2};
-    let salt = SaltString::generate(&mut rand::thread_rng());
+    let salt = SaltString::generate(&mut OsRng);
    let hashed = Argon2::default()
        .hash_password(plain.as_bytes(), &salt)
        .map_err(|e| anyhow::anyhow!(e.to_string()))?
@ -2,3 +2,4 @@
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

+#HttpOnly_127.0.0.1	FALSE	/	FALSE	1759594896	refresh_token	eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsInVpZCI6MSwiaXNzIjoidWRtaW4iLCJleHAiOjE3NTk1OTQ4OTYsInR5cCI6InJlZnJlc2gifQ.zH6gGProbzh4U7RzgYNH4DqD2-EyzvotbkGUfMBzp4k
@ -3,7 +3,7 @@
 * SPDX-License-Identifier: MIT
 */

-import { FC, useContext, useEffect, useState } from 'react';
+import { FC, useContext, useEffect, useRef, useState } from 'react';

import classnames from 'classnames';
import { useService, I18n } from '@flowgram.ai/free-layout-editor';
@ -41,6 +41,21 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
    | undefined
  >();

+  // Mode switch: SSE streaming vs. plain HTTP
+  const [streamMode, _setStreamMode] = useState<boolean>(() => {
+    const saved = localStorage.getItem('testrun-stream-mode');
+    return saved ? JSON.parse(saved) : true;
+  });
+  const setStreamMode = (checked: boolean) => {
+    _setStreamMode(checked);
+    localStorage.setItem('testrun-stream-mode', JSON.stringify(checked));
+  };
+
+  // Streaming render state: live context and logs
+  const [streamCtx, setStreamCtx] = useState<any | undefined>();
+  const [streamLogs, setStreamLogs] = useState<string[]>([]);
+  const cancelRef = useRef<(() => void) | null>(null);
+
  // en - Use localStorage to persist the JSON mode state
  const [inputJSONMode, _setInputJSONMode] = useState(() => {
    const savedMode = localStorage.getItem('testrun-input-json-mode');
@ -63,11 +78,13 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel

  const onTestRun = async () => {
    if (isRunning) {
-      // The backend run cannot be cancelled; ignore repeated clicks
+      // Already running; ignore repeated clicks
      return;
    }
    setResult(undefined);
    setErrors(undefined);
+    setStreamCtx(undefined);
+    setStreamLogs([]);
    setRunning(true);
    try {
      // Save silently before running so the backend YAML matches the editor; do not run if saving fails
@ -76,9 +93,40 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
        setErrors([I18n.t('Save failed, cannot run')]);
        return;
      }
+
+      if (streamMode) {
+        const { cancel, done } = customService.runStream(values, {
+          onNode: (evt) => {
+            if (evt.ctx) setStreamCtx((prev: any) => ({ ...(prev || {}), ...(evt.ctx || {}) }));
+            if (evt.logs && evt.logs.length) setStreamLogs((prev: string[]) => [...prev, ...evt.logs!]);
+          },
+          onError: (evt) => {
+            const msg = evt.message || I18n.t('Run failed');
+            setErrors((prev) => [...(prev || []), msg]);
+          },
+          onDone: (evt) => {
+            setResult({ ok: evt.ok, ctx: evt.ctx, logs: evt.logs });
+          },
+          onFatal: (err) => {
+            setErrors((prev) => [...(prev || []), err.message || String(err)]);
+            setRunning(false);
+          },
+        });
+
+        cancelRef.current = cancel;
+
+        const finished = await done;
+        if (finished) {
+          setResult(finished as any);
+        } else {
+          // The stream ended without a done event; surface a hint
+          setErrors((prev) => [...(prev || []), I18n.t('Stream terminated without completion')]);
+        }
+      } else {
+        // Plain one-shot HTTP run
+        try {
          const runRes = await customService.run(values);
          if (runRes) {
-            // If the backend returns ok=false, treat it as a failure and show the message and logs
            if ((runRes as any).ok === false) {
              setResult(runRes as any);
              const err = extractErrorMsg((runRes as any).logs) || I18n.t('Run failed');
@ -91,14 +139,29 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
            }
          } catch (e: any) {
            setErrors([e?.message || I18n.t('Run failed')]);
+          }
+        }
+    } catch (e: any) {
+      setErrors([e?.message || I18n.t('Run failed')]);
    } finally {
      setRunning(false);
+      cancelRef.current = null;
    }
  };

+  const onCancelRun = () => {
+    try { cancelRef.current?.(); } catch {}
+    setRunning(false);
+  };
+
  const onClose = async () => {
    setValues({});
+    if (isRunning) {
+      if (streamMode) onCancelRun();
+    }
    setRunning(false);
+    setStreamCtx(undefined);
+    setStreamLogs([]);
    onCancel();
  };

@ -119,6 +182,20 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
    <div className={styles['testrun-panel-running']}>
      <IconSpin spin size="large" />
      <div className={styles.text}>{I18n.t('Running...')}</div>
+      {/* Live output (streaming mode only) */}
+      {streamMode && (
+        <>
+          {errors?.length ? (
+            <div className={styles.error}>
+              {errors.map((e) => (
+                <div key={e}>{e}</div>
+              ))}
+            </div>
+          ) : null}
+          <NodeStatusGroup title={I18n.t('Context') + ' (Live)'} data={streamCtx} optional disableCollapse />
+          <NodeStatusGroup title={I18n.t('Logs') + ' (Live)'} data={streamLogs} optional disableCollapse />
+        </>
+      )}
    </div>
  );

@ -141,6 +218,12 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
        onChange={(checked: boolean) => setInputJSONMode(checked)}
        size="small"
      />
+      <div>{I18n.t('Streaming Mode')}</div>
+      <Switch
+        checked={streamMode}
+        onChange={(checked: boolean) => setStreamMode(checked)}
+        size="small"
+      />
    </div>
    {renderStatus}
    {errors?.map((e) => (
@ -153,6 +236,13 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
    ) : (
      <TestRunForm values={values} setValues={setValues} />
    )}
+    {/* While a streaming run is in progress, show live output below the form instead of covering the whole panel */}
+    {streamMode && isRunning && (
+      <>
+        <NodeStatusGroup title={I18n.t('Context') + ' (Live)'} data={streamCtx} optional disableCollapse />
+        <NodeStatusGroup title={I18n.t('Logs') + ' (Live)'} data={streamLogs} optional disableCollapse />
+      </>
+    )}
    {/* Show the execution info returned by the backend */}
    <NodeStatusGroup title={I18n.t('Context')} data={result?.ctx} optional disableCollapse />
    <NodeStatusGroup title={I18n.t('Logs')} data={result?.logs} optional disableCollapse />
@ -161,8 +251,12 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel

  const renderButton = (
    <Button
-      onClick={onTestRun}
+      onClick={isRunning ? (streamMode ? onCancelRun : undefined) : onTestRun}
-      icon={isRunning ? <IconCancel /> : <IconPlay size="small" />}
+      disabled={isRunning && !streamMode}
+      icon={
+        // The spinning button icon is the only running indicator
+        isRunning ? <IconSpin spin size="small" /> : <IconPlay size="small" />
+      }
      className={classnames(styles.button, {
        [styles.running]: isRunning,
        [styles.default]: !isRunning,
@ -204,7 +298,8 @@ export const TestRunSidePanel: FC<TestRunSidePanelProps> = ({ visible, onCancel
      />
    </div>
    <div className={styles['testrun-panel-content']}>
-      {isRunning ? renderRunning : renderForm}
+      {/* Always show the form and result area; no overlay while running */}
+      {renderForm}
    </div>
    <div className={styles['testrun-panel-footer']}>{renderButton}</div>
  </div>
@ -73,6 +73,8 @@ export function Editor() {
        const parsed = parseFlowYaml(payload?.yaml || '')
        nextDoc = parsed?.doc as any
      }
+      // New: restore design_json node types from 'javascript' back to 'code' so the Code node form works in the frontend
+      nextDoc = transformDesignJsonFromBackend(nextDoc)
      // Fallback: if the backend has no flow data at all (empty YAML / empty design_json), use a minimal flow (with start and end nodes)
      if (!nextDoc || !Array.isArray((nextDoc as any).nodes) || (nextDoc as any).nodes.length === 0) {
        if (mounted) setDoc(MINIMAL_DOC)
@ -110,3 +112,21 @@ export function Editor() {
}

export default Editor;
+
+// New: map the backend's stored 'javascript' type back to the UI's 'code' type
+function transformDesignJsonFromBackend(json: any): any {
+  try {
+    const clone = JSON.parse(JSON.stringify(json));
+    if (Array.isArray(clone?.nodes)) {
+      clone.nodes = clone.nodes.map((n: any) => {
+        if (n && n.type === 'javascript') {
+          return { ...n, type: 'code' };
+        }
+        return n;
+      });
+    }
+    return clone;
+  } catch {
+    return json;
+  }
+}
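The commit only shows this load-side mapping; the save path presumably applies the inverse, since the backend stores the `javascript` type. The helper below is a hypothetical counterpart for illustration only — its name and call site are assumptions, not part of this commit.

```typescript
// Hypothetical sketch (not in this commit): the inverse mapping a save path would need,
// turning the UI's 'code' nodes back into 'javascript' before design_json is sent to the backend.
function transformDesignJsonForBackend(json: any): any {
  try {
    const clone = JSON.parse(JSON.stringify(json));
    if (Array.isArray(clone?.nodes)) {
      clone.nodes = clone.nodes.map((n: any) =>
        n && n.type === 'code' ? { ...n, type: 'javascript' } : n
      );
    }
    return clone;
  } catch {
    return json;
  }
}
```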
@ -366,6 +366,7 @@ export function useEditorProps(
        'Running...': '运行中...',
        'Input Form': '输入表单',
        'JSON Mode': 'JSON 模式',
+        'Streaming Mode': '流式模式',
        'Context': '上下文',
        'Logs': '日志',
        'Please input integer': '请输入整数',
@ -427,6 +428,7 @@ export function useEditorProps(
        'Rows': 'Rows',
        'First Row': 'First Row',
        'Affected Rows': 'Affected Rows',
+        'Streaming Mode': 'Streaming Mode',
      },
    },
  },
|
|||||||
import { I18n } from '@flowgram.ai/free-layout-editor';
|
import { I18n } from '@flowgram.ai/free-layout-editor';
|
||||||
import api, { type ApiResp } from '../../utils/axios';
|
import api, { type ApiResp } from '../../utils/axios';
|
||||||
import { stringifyFlowDoc } from '../utils/yaml';
|
import { stringifyFlowDoc } from '../utils/yaml';
|
||||||
|
import { postSSE } from '../../utils/sse';
|
||||||
|
|
||||||
interface RunResult { ok: boolean; ctx: any; logs: string[] }
|
interface RunResult { ok: boolean; ctx: any; logs: string[] }
|
||||||
|
|
||||||
|
// 与后端 StreamEvent 保持一致(serde(tag = "type"))
|
||||||
|
export type StreamEvent =
|
||||||
|
| { type: 'node'; node_id?: string; ctx?: any; logs?: string[] }
|
||||||
|
| { type: 'done'; ok: boolean; ctx: any; logs: string[] }
|
||||||
|
| { type: 'error'; message: string };
|
||||||
|
|
||||||
// 兼容 BrowserRouter 与 HashRouter:优先从 search 获取,若无则从 hash 的查询串中获取
|
// 兼容 BrowserRouter 与 HashRouter:优先从 search 获取,若无则从 hash 的查询串中获取
|
||||||
function getFlowIdFromUrl(): string {
|
function getFlowIdFromUrl(): string {
|
||||||
const searchId = new URLSearchParams(window.location.search).get('id');
|
const searchId = new URLSearchParams(window.location.search).get('id');
|
||||||
@ -116,4 +123,40 @@ export class CustomService {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 新增:SSE 流式运行,返回取消函数与完成 Promise
|
||||||
|
runStream(input: any = {}, handlers?: { onNode?: (e: StreamEvent & { type: 'node' }) => void; onDone?: (e: StreamEvent & { type: 'done' }) => void; onError?: (e: StreamEvent & { type: 'error' }) => void; onFatal?: (err: Error) => void; }) {
|
||||||
|
const id = getFlowIdFromUrl();
|
||||||
|
if (!id) {
|
||||||
|
const err = new Error(I18n.t('Flow ID is missing, cannot run'));
|
||||||
|
handlers?.onFatal?.(err);
|
||||||
|
return { cancel: () => {}, done: Promise.resolve<RunResult | null>(null) } as const;
|
||||||
|
}
|
||||||
|
|
||||||
|
const base = (api.defaults.baseURL || '') as string;
|
||||||
|
const url = base ? `${base}/flows/${id}/run/stream` : `/flows/${id}/run/stream`;
|
||||||
|
|
||||||
|
const { cancel, done } = postSSE<RunResult | null>(url, { input }, {
|
||||||
|
onMessage: (json: any) => {
|
||||||
|
try {
|
||||||
|
const evt = json as StreamEvent
|
||||||
|
if (evt.type === 'node') {
|
||||||
|
handlers?.onNode?.(evt as any)
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
if (evt.type === 'error') {
|
||||||
|
handlers?.onError?.(evt as any)
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
if (evt.type === 'done') {
|
||||||
|
handlers?.onDone?.(evt as any)
|
||||||
|
return { ok: evt.ok, ctx: evt.ctx, logs: evt.logs }
|
||||||
|
}
|
||||||
|
} catch (_) {}
|
||||||
|
return undefined
|
||||||
|
},
|
||||||
|
onFatal: (e) => handlers?.onFatal?.(e),
|
||||||
|
})
|
||||||
|
return { cancel, done } as const;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
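As a rough illustration of how `runStream` is meant to be consumed (the test panel above does essentially this), here is a minimal sketch. The import path and the 30-second timeout are assumptions, not part of the commit.

```typescript
// Minimal consumption sketch (illustrative only).
import { CustomService } from './custom-service'; // hypothetical import path

async function debugRun(service: CustomService, input: Record<string, unknown>) {
  const { cancel, done } = service.runStream(input, {
    onNode: (e) => console.log('node', e.node_id, e.logs),
    onError: (e) => console.warn('error', e.message),
    onDone: (e) => console.log('done', e.ok),
    onFatal: (err) => console.error('fatal', err),
  });
  // Give up after 30s if no done event arrives (illustrative safeguard).
  const timer = setTimeout(cancel, 30_000);
  const result = await done; // resolves with the RunResult from the done event, or null
  clearTimeout(timer);
  return result;
}
```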
114  frontend/src/utils/sse.ts  Normal file
@ -0,0 +1,114 @@
import api, { type ApiResp } from './axios'
import { getToken } from './token'

export interface PostSSEHandlers<T> {
  onMessage: (json: any) => T | void
  onFatal?: (err: Error) => void
  headers?: Record<string, string>
}

// Generic POST SSE helper with authentication and a one-shot token refresh
export function postSSE<T = unknown>(url: string, body: unknown, handlers: PostSSEHandlers<T>) {
  const controller = new AbortController()
  let aborted = false

  const doFetch = async (): Promise<Response> => {
    const token = getToken()
    const baseHeaders: Record<string, string> = { 'Content-Type': 'application/json' }
    if (token) baseHeaders['Authorization'] = `Bearer ${token}`
    const headers = { ...baseHeaders, ...(handlers.headers || {}) }

    const resp = await fetch(url, {
      method: 'POST',
      headers,
      body: JSON.stringify(body ?? {}),
      signal: controller.signal,
      credentials: 'include',
    })
    if (resp.status === 401) {
      try {
        const { data } = await api.get<ApiResp<{ access_token: string }>>('/auth/refresh')
        if (data?.code === 0) {
          const token2 = getToken()
          if (token2) headers['Authorization'] = `Bearer ${token2}`
          return await fetch(url, {
            method: 'POST',
            headers,
            body: JSON.stringify(body ?? {}),
            signal: controller.signal,
            credentials: 'include',
          })
        }
      } catch {}
    }
    return resp
  }

  const done = (async (): Promise<T | null> => {
    try {
      const resp = await doFetch()
      if (!resp.ok || !resp.body) {
        try {
          const data = await resp.json()
          const msg = (data && (data.message || data.msg)) || `SSE request failed: ${resp.status}`
          throw new Error(msg)
        } catch {
          throw new Error(`SSE request failed: ${resp.status}`)
        }
      }
      const reader = resp.body.getReader()
      const decoder = new TextDecoder('utf-8')
      let buffer = ''

      const flush = (chunk: string): T | null => {
        buffer += chunk
        const parts = buffer.split(/\n\n/)
        buffer = parts.pop() || ''
        for (const part of parts) {
          const dataLines = part
            .split(/\n/)
            .filter((l) => l.startsWith('data:'))
            .map((l) => l.slice(5).trimStart())
          if (!dataLines.length) continue
          // Tolerate CRLF: strip trailing \r and trim so JSON.parse does not fail
          const payloadRaw = dataLines.join('\n')
          const payload = payloadRaw.replace(/\r+$/g, '').trim()
          try {
            const json = JSON.parse(payload)
            const ret = handlers.onMessage(json)
            if (typeof ret !== 'undefined') {
              // Terminal signal received: abort the connection to avoid leaving it hanging
              aborted = true
              try { controller.abort() } catch {}
              return ret as T
            }
          } catch (_) {
            // Ignore individual events that fail to parse
          }
        }
        return null
      }

      while (!aborted) {
        const { value, done } = await reader.read()
        if (done) break
        const text = decoder.decode(value, { stream: true })
        const ret = flush(text)
        if (ret !== null) return ret
      }
      return null
    } catch (e: any) {
      // Fatal error: notify the callback and make sure the connection is aborted
      try { controller.abort() } catch {}
      aborted = true
      handlers.onFatal?.(e instanceof Error ? e : new Error(String(e)))
      return null
    }
  })()

  const cancel = () => {
    aborted = true
    try { controller.abort() } catch {}
  }
  return { cancel, done } as const
}
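A quick usage sketch for `postSSE` (illustrative; the flow id `123` and the logging are made up). The key contract is that returning any value other than `undefined` from `onMessage` aborts the connection and resolves `done` with that value.

```typescript
import { postSSE } from './sse';

interface DoneEvent { type: 'done'; ok: boolean; ctx: unknown; logs: string[] }

const { cancel, done } = postSSE<DoneEvent>('/api/flows/123/run/stream', { input: {} }, {
  onMessage: (json) => {
    // Returning a non-undefined value tells postSSE to abort and resolve `done` with it.
    if (json?.type === 'done') return json as DoneEvent;
    console.log('event', json);
    return undefined;
  },
  onFatal: (err) => console.error('SSE failed:', err),
});

// cancel() may be called at any time to abort the underlying fetch.
void done.then((evt) => console.log('finished:', evt?.ok));
```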
File diff suppressed because one or more lines are too long
@ -8,7 +8,7 @@ export default defineConfig(({ mode }) => {
  const env = loadEnv(mode, '.', '')
  const port = Number(env.VITE_PORT || 5173)
  const open = String(env.VITE_OPEN ?? 'true').toLowerCase() === 'true' || env.VITE_OPEN === '1'
-  const proxyTarget = env.VITE_ADMIN_PROXY_PATH || 'http://127.0.0.1:8080'
+  const proxyTarget = env.VITE_ADMIN_PROXY_PATH || 'http://127.0.0.1:9898'

  return {
    plugins: [
@ -30,7 +30,50 @@ export default defineConfig(({ mode }) => {
      proxy: {
        '/api': {
          target: proxyTarget,
-          changeOrigin: true
+          changeOrigin: true,
+          // Harden SSE pass-through: disable timeouts and keep the connection alive
+          proxyTimeout: 0,
+          timeout: 0,
+          headers: { 'Connection': 'keep-alive' },
+          // Key point: disable buffering/caching at the dev proxy so responses are forwarded to the browser in chunks rather than aggregated into one payload
+          configure: (proxy: any) => {
+            // Remove Accept-Encoding so backend compression cannot trigger middleware buffering
+            proxy.on('proxyReq', (proxyReq: any, req: any) => {
+              const url: string = req?.url || ''
+              if (url.includes('/run/stream')) {
+                try {
+                  if (typeof proxyReq.removeHeader === 'function') proxyReq.removeHeader('accept-encoding')
+                  proxyReq.setHeader('accept', 'text/event-stream')
+                  proxyReq.setHeader('connection', 'keep-alive')
+                } catch {}
+              }
+            })

+            proxy.on('proxyRes', (proxyRes: any, req: any, res: any) => {
+              const url: string = req?.url || ''
+              const ct: string = String(proxyRes.headers?.['content-type'] || '')
+              const isSse = url.includes('/run/stream') || ct.includes('text/event-stream')
+              if (!isSse) return
+              try {
+                // Rewrite the backend response headers so the stream is SSE with no length/compression
+                proxyRes.headers['content-type'] = 'text/event-stream; charset=utf-8'
+                proxyRes.headers['cache-control'] = 'no-cache'
+                proxyRes.headers['pragma'] = 'no-cache'
+                proxyRes.headers['x-accel-buffering'] = 'no'
+                delete proxyRes.headers['content-length']
+                delete proxyRes.headers['content-encoding']

+                // Also keep the devServer's headers to the browser consistent, and send them as early as possible
+                res.setHeader('Content-Type', 'text/event-stream; charset=utf-8')
+                res.setHeader('Cache-Control', 'no-cache')
+                res.setHeader('Pragma', 'no-cache')
+                res.setHeader('X-Accel-Buffering', 'no')
+                if (typeof res.removeHeader === 'function') res.removeHeader('Content-Length')
+                if (typeof res.removeHeader === 'function') res.removeHeader('Content-Encoding')
+                if (typeof res.flushHeaders === 'function') res.flushHeaders()
+              } catch {}
+            })
+          }
        }
      }
    }
  }