From e8297d099629aec9a0b8981820a8901d448565fb Mon Sep 17 00:00:00 2001 From: duguwanglong Date: Fri, 10 Apr 2026 12:54:38 +0800 Subject: [PATCH 1/3] feat: add DingTalk channel plugin with OpenClaw connector - Add dingtalk channel plugin files to .flocks/plugins/channels/dingtalk/ including dingtalk.py, runner.ts and dingtalk-openclaw-connector - Add channels/dingtalk/ as the source copy alongside runtime plugin - Pin axios to 1.14.0 in connector package.json for version stability - Initialize root workspace package.json / package-lock.json Made-with: Cursor --- .../dingtalk-openclaw-connector/.gitignore | 17 + .../dingtalk-openclaw-connector/.npmignore | 4 + .../dingtalk-openclaw-connector/LICENSE | 21 + .../dingtalk-openclaw-connector/README.md | 665 +++ .../dingtalk-openclaw-connector/bun.lock | 154 + .../openclaw.plugin.json | 15 + .../dingtalk-openclaw-connector/package.json | 60 + .../dingtalk-openclaw-connector/plugin.ts | 3867 +++++++++++++++++ .flocks/plugins/channels/dingtalk/dingtalk.py | 249 ++ .flocks/plugins/channels/dingtalk/runner.ts | 355 ++ 10 files changed, 5407 insertions(+) create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.gitignore create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.npmignore create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/LICENSE create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/README.md create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/bun.lock create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/openclaw.plugin.json create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/package.json create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/plugin.ts create mode 100644 .flocks/plugins/channels/dingtalk/dingtalk.py create mode 100644 .flocks/plugins/channels/dingtalk/runner.ts diff --git 
a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.gitignore b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.gitignore new file mode 100644 index 0000000..b22138c --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.gitignore @@ -0,0 +1,17 @@ +__pycache__/ +*.py[cod] +*.egg-info/ +dist/ +build/ +.venv/ +.env +*.egg +node_modules/ +package-lock.json + +# IDE +.idea/ + +openclaw/ +.aone_copilot/ +AGENTS.md \ No newline at end of file diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.npmignore b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.npmignore new file mode 100644 index 0000000..b32fe0f --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/.npmignore @@ -0,0 +1,4 @@ +.git/ +.claude/ +.env +.gitignore diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/LICENSE b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/LICENSE new file mode 100644 index 0000000..ad00cc5 --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2026 DingTalk Real Team + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/README.md b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/README.md new file mode 100644 index 0000000..2070fa1 --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/README.md @@ -0,0 +1,665 @@ +# DingTalk OpenClaw Connector + +以下提供两种方案连接到 [OpenClaw](https://openclaw.ai) Gateway,分别是钉钉机器人和钉钉 DEAP Agent。 + +> 📝 **版本信息**:当前版本 v0.7.5 | [查看变更日志](CHANGELOG.md) | [发布说明](docs/RELEASE_NOTES_V0.7.4.md) | [发布指南](RELEASE.md) + +## 快速导航 + +| 方案 | 名称 | 详情 | +|------|------|------| +| 方案一 | 钉钉机器人集成 | [查看详情](#方案一钉钉机器人集成) | +| 方案二 | 钉钉 DEAP Agent 集成 | [查看详情](#方案二钉钉-deap-agent-集成) | + +# 方案一:钉钉机器人集成 +将钉钉机器人连接到 [OpenClaw](https://openclaw.ai) Gateway,支持 AI Card 流式响应和会话管理。 + +## 特性 + +- ✅ **AI Card 流式响应** - 打字机效果,实时显示 AI 回复 +- ✅ **会话持久化** - 同一用户的多轮对话共享上下文 +- ✅ **会话与记忆隔离** - 按单聊/群聊/群区分 session,不同场景下的对话上下文互不干扰,可配置跨会话记忆共享 +- ✅ **超时自动新会话** - 默认 30 分钟无活动自动开启新对话 +- ✅ **手动新会话** - 发送 `/new` 或 `新会话` 清空对话历史 +- ✅ **图片自动上传** - 本地图片路径自动上传到钉钉 +- ✅ **主动发送消息** - 支持主动给钉钉个人或群发送消息 +- ✅ **富媒体接收** - 支持接收 JPEG/PNG 图片消息,自动下载并传递给视觉模型 +- ✅ **文件附件提取** - 支持解析 .docx、.pdf、纯文本文件(.txt、.md、.json 等)和二进制文件(.xlsx、.pptx、.zip 等) +- ✅ **音频消息支持** - 支持发送音频消息,支持多种格式(mp3、wav、amr、ogg),自动提取音频时长,支持通过标记或文件附件方式发送 +- ✅ **钉钉文档 API** - 支持创建、追加、搜索、列举钉钉文档 +- ✅ **多 Agent 路由** - 支持一个连接器实例连接多个 Agent,多个钉钉机器人可分别绑定到不同 Agent,实现角色分工和专业化服务 +- ✅ **Markdown 表格转换** - 自动将 Markdown 表格转换为钉钉支持的文本格式,提升消息可读性 +- ✅ **异步模式** - 立即回执用户消息,后台处理任务,然后主动推送最终结果作为独立消息(可选) + + +## 架构 + +```mermaid +graph LR + subgraph "钉钉" + A["用户发消息"] --> B["Stream WebSocket"] + E["AI 流式卡片"] --> F["用户看到回复"] + end + + subgraph "Connector" + B --> C["消息处理器"] + C -->|"HTTP SSE"| D["Gateway 
/v1/chat/completions"] + D -->|"流式 chunk"| C + C -->|"streaming API"| E + end +``` + +## 效果 + +image +image + +## 安装 + +### 1. 安装插件 + +```bash +# 通过 npm 安装(推荐) +openclaw plugins install @dingtalk-real-ai/dingtalk-connector + +# 或通过 Git 安装 +openclaw plugins install https://github.com/DingTalk-Real-AI/dingtalk-openclaw-connector.git + +# 升级插件 +openclaw plugins update dingtalk-connector + +# 或本地开发模式 +git clone https://github.com/DingTalk-Real-AI/dingtalk-openclaw-connector.git +cd dingtalk-openclaw-connector +npm install +openclaw plugins install -l . +``` + +> **⚠️ 旧版本升级提示:** 如果你之前安装过旧版本的 Clawdbot/Moltbot 或 0.4.0 以下版本的 connector 插件,可能会出现兼容性问题,请参考 [Q: 升级后出现插件加载异常或配置不生效](#q-升级后出现插件加载异常或配置不生效)。 + +### 2. 配置 + +在 `~/.openclaw/openclaw.json` 中添加: + +```json5 +{ + "channels": { + "dingtalk-connector": { + "enabled": true, + "clientId": "dingxxxxxxxxx", // 钉钉 AppKey + "clientSecret": "your_secret_here", // 钉钉 AppSecret + "gatewayToken": "", // 可选:Gateway 认证 token, openclaw.json配置中 gateway.auth.token 的值 + "gatewayPassword": "", // 可选:Gateway 认证 password(与 token 二选一) + "sessionTimeout": 1800000, // ⚠️ 已废弃,请使用 Gateway 的 session.reset.idleMinutes 配置 + "separateSessionByConversation": true, // 可选:是否按单聊/群聊/群区分 session(默认:true) + "groupSessionScope": "group", // 可选:群聊会话隔离策略,group=群共享,group_sender=群内用户独立(默认:group) + "sharedMemoryAcrossConversations": false, // 可选:是否在不同会话间共享记忆;false 时群聊与私聊、不同群记忆隔离(默认:false) + "asyncMode": false, // 可选:异步模式,立即回执用户消息,后台处理并推送结果(默认:false) + "ackText": "🫡 任务已接收" // 可选:异步模式下的回执消息文本(默认:'🫡 任务已接收,处理中...') + } + }, + "gateway": { // gateway通常是已有的节点,配置时注意把http部分追加到已有节点下 + "http": { + "endpoints": { + "chatCompletions": { + "enabled": true + } + } + } + } +} +``` + +或者在 OpenClaw Dashboard 页面配置: + +image + +### 3. 重启 Gateway + +```bash +openclaw gateway restart +``` + +验证: + +```bash +openclaw plugins list # 确认 dingtalk-connector 已加载 +``` + +## 创建钉钉机器人 + +1. 打开 [钉钉开放平台](https://open.dingtalk.com/) +2. 进入 **应用开发** → **企业内部开发** → 创建应用 +3. 
添加 **机器人** 能力,消息接收模式选择 **Stream 模式** +4. 开通权限: + - `Card.Streaming.Write` - AI Card 流式响应 + - `Card.Instance.Write` - AI Card 实例写入 + - `qyapi_robot_sendmsg` - 主动发送消息 + - 如需使用文档 API 功能,还需开通文档相关权限 +5. **发布应用**,记录 **AppKey** 和 **AppSecret** + +## 配置参考 + +| 配置项 | 环境变量 | 说明 | +|--------|----------|------| +| `clientId` | `DINGTALK_CLIENT_ID` | 钉钉 AppKey | +| `clientSecret` | `DINGTALK_CLIENT_SECRET` | 钉钉 AppSecret | +| `gatewayToken` | `OPENCLAW_GATEWAY_TOKEN` | Gateway 认证 token(可选) | +| `gatewayPassword` | — | Gateway 认证 password(可选,与 token 二选一) | +| `sessionTimeout` | — | ⚠️ 已废弃,请使用 Gateway 的 [`session.reset.idleMinutes`](https://docs.openclaw.ai/gateway/configuration) 配置 | +| `separateSessionByConversation` | — | 是否按单聊/群聊/群区分 session(默认:true) | +| `groupSessionScope` | — | 群聊会话隔离策略(仅当 separateSessionByConversation=true 时生效):`group`=群共享,`group_sender`=群内用户独立(默认:group) | +| `sharedMemoryAcrossConversations` | — | 是否在不同会话间共享记忆;false 时群聊与私聊、不同群记忆隔离(默认:false) | +| `asyncMode` | — | 异步模式,立即回执用户消息,后台处理并推送结果(默认:false) | +| `ackText` | — | 异步模式下的回执消息文本(默认:'🫡 任务已接收,处理中...') | + +## 会话与记忆隔离 + +连接器支持按单聊、群聊、不同群分别维护独立会话和记忆,确保同一用户在不同场景下的对话上下文互不干扰。 + +### 会话隔离(separateSessionByConversation) + +- **默认开启**(`true`):单聊、群聊、不同群各自拥有独立的 session +- **关闭**(`false`):按用户维度维护 session,不区分单聊/群聊(兼容旧行为) + +### 群聊会话隔离(groupSessionScope) + +仅当 `separateSessionByConversation=true` 时生效: + +- **`group`**(默认):整个群共享一个会话,群内所有用户共用同一个对话上下文 +- **`group_sender`**:群内每个用户独立会话,不同用户的对话上下文互不干扰 + +### 记忆隔离(sharedMemoryAcrossConversations) + +- **默认关闭**(`false`):不同群聊、群聊与私聊之间的记忆隔离,AI 不会混淆不同场景下的对话历史 +- **开启**(`true`):单 Agent 场景下,同一用户在不同会话间共享记忆 + +### 适用场景 + +- ✅ 同一机器人在多个群中服务,希望每个群的对话互不干扰 +- ✅ 用户既在私聊也在群聊中使用机器人,希望私聊与群聊上下文分离 +- ✅ 群内所有成员共享对话上下文(默认 `groupSessionScope: "group"`) +- ✅ 群内每个用户独立对话(设置 `groupSessionScope: "group_sender"`) +- ✅ 需要跨会话共享记忆时,可设置 `sharedMemoryAcrossConversations: true` + +## 异步模式 + +异步模式允许连接器立即回执用户消息,然后在后台处理任务,最后主动推送最终结果作为独立消息。这种模式特别适合处理耗时较长的任务,可以给用户更好的交互体验。 + +### 启用异步模式 + +在配置中设置 `asyncMode: true`: + 
+```json5 +{ + "channels": { + "dingtalk-connector": { + "enabled": true, + "clientId": "dingxxxxxxxxx", + "clientSecret": "your_secret_here", + "asyncMode": true, // 启用异步模式 + "ackText": "🫡 任务已接收" // 可选:自定义回执消息 + } + } +} +``` + +### 工作流程 + +1. **立即回执** - 用户发送消息后,连接器立即发送回执消息(默认:`🫡 任务已接收,处理中...`) +2. **后台处理** - 连接器在后台调用 Gateway 处理任务,支持文件附件和图片 +3. **推送结果** - 处理完成后,连接器主动推送最终结果作为独立消息 + +### 适用场景 + +- ✅ 处理耗时较长的任务(如文档分析、代码生成等) +- ✅ 需要给用户即时反馈的场景 +- ✅ 希望将处理过程和结果分离的场景 + +### 注意事项 + +- 异步模式下不支持 AI Card 流式响应(因为结果通过主动推送发送) +- 异步模式支持文件附件和图片处理 +- 错误信息也会通过主动推送发送给用户 + +## 多 Agent 配置 + +钉钉 Connector 支持多 Agent 模式,可以配置多个钉钉机器人连接到不同的 Agent,实现角色分工和专业化服务。 + +### 核心配置 + +在 `~/.openclaw/openclaw.json` 中配置多个钉钉账号和 Agent 绑定: + +```json5 +{ + "channels": { + "dingtalk-connector": { + "enabled": true, + "accounts": { + "bot1": { + "enabled": true, + "clientId": "ding_bot1_app_key", + "clientSecret": "bot1_secret" + }, + "bot2": { + "enabled": true, + "clientId": "ding_bot2_app_key", + "clientSecret": "bot2_secret" + } + } + } + }, + "bindings": [ + { + "agentId": "ding-bot1", + "match": { + "channel": "dingtalk-connector", + "accountId": "bot1" + } + }, + { + "agentId": "ding-bot2", + "match": { + "channel": "dingtalk-connector", + "accountId": "bot2" + } + } + ] +} +``` + +### 基于单聊/群聊的路由(peer.kind) + +连接器支持根据会话类型(单聊/群聊)将消息路由到不同的 Agent。这对于以下场景非常有用: + +- **安全隔离**:群聊使用受限功能的 Agent,单聊使用完整功能的 Agent +- **多角色支持**:不同用户或会话类型分配不同的 Agent +- **成本优化**:普通用户路由到低成本模型,VIP 用户使用高端模型 + +#### 配置示例 + +```json5 +{ + "bindings": [ + // 场景1:特定用户的单聊 → main agent(完整功能) + { + "agentId": "main", + "match": { + "channel": "dingtalk-connector", + "peer": { + "kind": "direct", + "id": "YOUR_VIP_USER_ID" + } + } + }, + // 场景2:所有群聊 → guest agent(受限功能) + { + "agentId": "guest", + "match": { + "channel": "dingtalk-connector", + "peer": { + "kind": "group", + "id": "*" + } + } + }, + // 场景3:其他单聊 → guest agent(受限功能) + { + "agentId": "guest", + "match": { + "channel": "dingtalk-connector", + "peer": { + "kind": "direct", + "id": "*" 
+ } + } + } + ] +} +``` + +#### peer.kind 配置说明 + +| 字段 | 类型 | 说明 | +|------|------|------| +| `peer.kind` | `'direct'` \| `'group'` | 会话类型:`direct` 表示单聊,`group` 表示群聊 | +| `peer.id` | `string` | 发送者 ID(单聊)或 `*` 通配符匹配所有 | + +#### 匹配优先级 + +bindings 按以下优先级匹配(从高到低): + +1. **peer.kind + peer.id 精确匹配**:指定会话类型和具体用户 ID +2. **peer.kind + peer.id='*' 通配匹配**:指定会话类型,匹配所有用户 +3. **仅 peer.kind 匹配**:只指定会话类型(无 peer.id) +4. **accountId 匹配**:按钉钉账号路由 +5. **channel 匹配**:仅指定 channel +6. **默认 fallback**:使用 `main` agent + +### 官方文档 + +详细的配置指南和架构说明,请参考 OpenClaw 官方文档: + +- [OpenClaw 多 Agent 架构配置指南](https://gist.github.com/smallnest/c5c13482740fd179e40070e620f66a52) + + +## 会话命令 + +用户可以发送以下命令开启新会话(清空对话历史): + +- `/new`、`/reset`、`/clear` +- `新会话`、`重新开始`、`清空对话` + +## 富媒体接收 + +### 图片消息支持 + +连接器支持接收和处理钉钉中的图片消息: + +- **JPEG 图片** - 直接发送的 JPEG 图片会自动下载到 `~/.openclaw/workspace/media/inbound/` 目录 +- **PNG 图片** - 富文本消息中包含的 PNG 图片会自动提取 URL 和 downloadCode 并下载 +- **视觉模型集成** - 下载的图片会自动传递给视觉模型,AI 可以识别和分析图片内容 + +### 媒体文件存储 + +所有接收的媒体文件会保存在: + +```bash +~/.openclaw/workspace/media/inbound/ +``` + +文件命名格式:`openclaw-media-{timestamp}.{ext}` + +查看媒体目录: + +```bash +ls -la ~/.openclaw/workspace/media/inbound/ +``` + +## 文件附件提取 + +连接器支持自动提取和处理钉钉消息中的文件附件: + +### 支持的文件类型 + +| 文件类型 | 处理方式 | 说明 | +|---------|---------|------| +| `.docx` | 通过 `mammoth` 解析 | 提取 Word 文档中的文本内容,注入到 AI 上下文 | +| `.pdf` | 通过 `pdf-parse` 解析 | 提取 PDF 文档中的文本内容,注入到 AI 上下文 | +| `.txt`、`.md`、`.json` 等 | 直接读取 | 纯文本文件内容直接读取并注入到消息中 | +| `.xlsx`、`.pptx`、`.zip` 等 | 保存到磁盘 | 二进制文件保存到磁盘,文件路径和名称会在消息中报告 | + +### 使用方式 + +直接在钉钉中发送文件附件,连接器会自动: +1. 下载文件到本地 +2. 根据文件类型进行解析或保存 +3. 
将文本内容注入到 AI 对话上下文中 + +## 钉钉文档 API + +连接器提供了丰富的钉钉文档操作能力,可在 OpenClaw Agent 中调用: + +### 创建文档 + +```javascript +dingtalk-connector.docs.create({ + spaceId: "your-space-id", + title: "测试文档", + content: "# 测试内容" +}) +``` + +### 追加内容 + +```javascript +dingtalk-connector.docs.append({ + docId: "your-doc-id", + markdownContent: "\n## 追加的内容" +}) +``` + +### 搜索文档 + +```javascript +dingtalk-connector.docs.search({ + keyword: "搜索关键词" +}) +``` + +### 列举文档 + +```javascript +dingtalk-connector.docs.list({ + spaceId: "your-space-id" +}) +``` + +## 多 Agent 路由支持 + +连接器支持同时连接多个 Agent,实现多 Agent 会话隔离: + +- **独立会话空间** - 每个 Agent 拥有独立的会话上下文,互不干扰 +- **灵活路由** - 可根据不同场景将请求路由到不同的 Agent +- **向后兼容** - 单 Agent 场景下功能完全兼容,无需额外配置 + +## 项目结构 + +``` +dingtalk-openclaw-connector/ +├── plugin.ts # 插件入口 +├── openclaw.plugin.json # 插件清单 +├── package.json # npm 依赖 +└── LICENSE +``` + +## 常见问题 + +### Q: 出现 405 错误 + +image + +需要在 `~/.openclaw/openclaw.json` 中启用 chatCompletions 端点: + +```json5 +{ + "gateway": { // gateway通常是已有的节点,配置时注意把http部分追加到已有节点下 + "http": { + "endpoints": { + "chatCompletions": { + "enabled": true + } + } + } + } +} +``` + +### Q: 出现 401 错误 + +image + +检查 `~/.openclaw/openclaw.json` 中的gateway.auth鉴权的 token/password 是否正确: + +image + +### Q: 钉钉机器人无响应 + +1. 确认 Gateway 正在运行:`curl http://127.0.0.1:18789/health` +2. 确认机器人配置为 **Stream 模式**(非 Webhook) +3. 确认 AppKey/AppSecret 正确 + +### Q: AI Card 不显示,只有纯文本 + +需要开通权限 `Card.Streaming.Write` 和 `Card.Instance.Write`,并重新发布应用。 + +### Q: 升级后出现插件加载异常或配置不生效 + +由于官方两次更名(Clawdbot → Moltbot → OpenClaw),旧版本(0.4.0 以下)的 connector 插件可能与新版本不兼容。建议按以下步骤处理: + +1. 先检查 `~/.openclaw/openclaw.json`(或旧版的 `~/.clawdbot/clawdbot.json`、`~/.moltbot/moltbot.json`),如果其中存在 dingtalk 相关的 JSON 节点(如 `channels.dingtalk`、`plugins.entries.dingtalk` 等),请将这些节点全部删除。 + +2. 
然后清除旧插件并重新安装: + +```bash +rm -rf ~/.clawdbot/extensions/dingtalk-connector +rm -rf ~/.moltbot/extensions/dingtalk-connector +rm -rf ~/.openclaw/extensions/dingtalk-connector +openclaw plugins install @dingtalk-real-ai/dingtalk-connector +``` + +### Q: 图片不显示 + +1. 确认 `enableMediaUpload: true`(默认开启) +2. 检查日志 `[DingTalk][Media]` 相关输出 +3. 确认钉钉应用有图片上传权限 + +### Q: 图片消息无法识别 + +1. 检查图片是否成功下载到 `~/.openclaw/workspace/media/inbound/` 目录 +2. 确认 Gateway 配置的模型支持视觉能力(vision model) +3. 查看日志中是否有图片下载或处理的错误信息 + +### Q: 文件附件无法解析 + +1. **Word 文档(.docx)**:确认已安装 `mammoth` 依赖包 +2. **PDF 文档**:确认已安装 `pdf-parse` 依赖包 +3. 检查文件是否成功下载,查看日志中的文件处理信息 +4. 对于不支持的二进制文件,会保存到磁盘并在消息中报告文件路径 + +### Q: 钉钉文档 API 调用失败 + +1. 确认钉钉应用已开通文档相关权限 +2. 检查 `spaceId`、`docId` 等参数是否正确 +3. 确认 API 调用时的认证信息(AppKey/AppSecret)有效 +4. 注意:读取文档功能需要 MCP 提供相应的 tool,当前版本暂不支持 + +### Q: 多 Agent 路由如何配置 + +多 Agent 路由功能会自动处理,无需额外配置。连接器会根据配置自动管理多个 Agent 的会话隔离。如需自定义路由逻辑,请参考插件源码中的路由实现。 + +## 依赖 + +| 包 | 用途 | +|----|------| +| `dingtalk-stream` | 钉钉 Stream 协议客户端 | +| `axios` | HTTP 客户端 | +| `mammoth` | Word 文档(.docx)解析 | +| `pdf-parse` | PDF 文档解析 | + +# 方案二:钉钉 DEAP Agent 集成 + +通过将钉钉 [DEAP](https://deap.dingtalk.com) Agent 与 [OpenClaw](https://openclaw.ai) Gateway 连接,实现自然语言驱动的本地设备操作能力。 + +## 核心功能 + +- ✅ **自然语言交互** - 用户在钉钉对话框中输入自然语言指令(如"帮我查找桌面上的 PDF 文件"),Agent 将自动解析并执行相应操作 +- ✅ **内网穿透机制** - 专为本地设备无公网 IP 场景设计,通过 Connector 客户端建立稳定的内外网通信隧道 +- ✅ **跨平台兼容** - 提供 Windows、macOS 和 Linux 系统的原生二进制执行文件,确保各平台下的顺畅运行 + +## 系统架构 + +该方案采用分层架构模式,包含三个核心组件: + +1. **OpenClaw Gateway** - 部署于本地设备,提供标准化 HTTP 接口,负责接收并处理来自云端的操作指令,调动 OpenClaw 引擎执行具体任务 +2. **DingTalk OpenClaw Connector** - 运行于本地环境,构建本地与云端的通信隧道,解决内网设备无公网 IP 的问题 +3. 
**DingTalk DEAP MCP** - 作为 DEAP Agent 的扩展能力模块,负责将用户自然语言请求经由云端隧道转发至 OpenClaw Gateway + +```mermaid +graph LR + subgraph "钉钉 App" + A["用户与 Agent 对话"] --> B["DEAP Agent"] + end + + subgraph "本地环境" + D["DingTalk OpenClaw Connector"] --> C["OpenClaw Gateway"] + C --> E["PC 操作执行"] + end + + B -.-> D +``` + +## 实施指南 + +### 第一步:部署本地环境 + +确认本地设备已成功安装并启动 OpenClaw Gateway,默认监听地址为 `127.0.0.1:18789`: + +```bash +openclaw gateway start +``` + +#### 配置 Gateway 参数 + +1. 访问 [配置页面](http://127.0.0.1:18789/config) +2. 在 **Auth 标签页** 中设置 Gateway Token 并妥善保存: + + Gateway Auth 配置界面 + +3. 切换至 **Http 标签页**,启用 `OpenAI Chat Completions Endpoint` 功能: + + Gateway Http 配置界面 + +4. 点击右上角 `Save` 按钮完成配置保存 + +### 第二步:获取必要参数 + +#### 获取 corpId + +登录 [钉钉开发者平台](https://open-dev.dingtalk.com) 查看企业 CorpId: + +钉钉开发者平台获取 corpId + +#### 获取 apiKey + +登录 [钉钉 DEAP 平台](https://deap.dingtalk.com),在 **安全与权限** → **API-Key 管理** 页面创建新的 API Key: + +钉钉 DEAP 平台 API-Key 管理 + +### 第三步:启动 Connector 客户端 + +1. 从 [Releases](https://github.com/hoskii/dingtalk-openclaw-connector/releases/tag/v0.0.1) 页面下载适配您操作系统的安装包 +2. 解压并运行 Connector(以 macOS 为例): + + ```bash + unzip connector-mac.zip + ./connector-darwin -deapCorpId YOUR_CORP_ID -deapApiKey YOUR_API_KEY + ``` + +### 第四步:配置 DEAP Agent + +1. 登录 [钉钉 DEAP 平台](https://deap.dingtalk.com),创建新的智能体: + + 新建智能体界面 + +2. 在技能管理页面,搜索并集成 OpenClaw 技能: + + 添加 OpenClaw 技能 + +3. 配置技能参数: + + | 参数 | 来源 | 说明 | + |------|------|------| + | apikey | 第二步获取 | DEAP 平台 API Key | + | apihost | 默认值 | 通常为 `127.0.0.1:18789`,在Windows环境下可能需要配置为 `localhost:18789` 才能正常工作 | + | gatewayToken | 第一步获取 | Gateway 配置的认证令牌 | + + 配置 OpenClaw 技能参数 + +4. 发布 Agent: + + 发布 Agent + +### 第五步:开始使用 + +1. 在钉钉 App 中搜索并找到您创建的 Agent: + + 搜索 Agent + +2. 
开始自然语言对话体验: + + 与 Agent 对话 + +## License + +[MIT](LICENSE) diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/bun.lock b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/bun.lock new file mode 100644 index 0000000..e566526 --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/bun.lock @@ -0,0 +1,154 @@ +{ + "lockfileVersion": 1, + "configVersion": 1, + "workspaces": { + "": { + "name": "@dingtalk-real-ai/dingtalk-connector", + "dependencies": { + "@ffmpeg-installer/ffmpeg": "^1.1.0", + "axios": "^1.6.0", + "dingtalk-stream": "^2.1.4", + "fluent-ffmpeg": "^2.1.3", + "mammoth": "^1.8.0", + "pdf-parse": "^1.1.1", + }, + }, + }, + "packages": { + "@ffmpeg-installer/darwin-arm64": ["@ffmpeg-installer/darwin-arm64@4.1.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-hYqTiP63mXz7wSQfuqfFwfLOfwwFChUedeCVKkBtl/cliaTM7/ePI9bVzfZ2c+dWu3TqCwLDRWNSJ5pqZl8otA=="], + + "@ffmpeg-installer/darwin-x64": ["@ffmpeg-installer/darwin-x64@4.1.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-Z4EyG3cIFjdhlY8wI9aLUXuH8nVt7E9SlMVZtWvSPnm2sm37/yC2CwjUzyCQbJbySnef1tQwGG2Sx+uWhd9IAw=="], + + "@ffmpeg-installer/ffmpeg": ["@ffmpeg-installer/ffmpeg@1.1.0", "", { "optionalDependencies": { "@ffmpeg-installer/darwin-arm64": "4.1.5", "@ffmpeg-installer/darwin-x64": "4.1.0", "@ffmpeg-installer/linux-arm": "4.1.3", "@ffmpeg-installer/linux-arm64": "4.1.4", "@ffmpeg-installer/linux-ia32": "4.1.0", "@ffmpeg-installer/linux-x64": "4.1.0", "@ffmpeg-installer/win32-ia32": "4.1.0", "@ffmpeg-installer/win32-x64": "4.1.0" } }, "sha512-Uq4rmwkdGxIa9A6Bd/VqqYbT7zqh1GrT5/rFwCwKM70b42W5gIjWeVETq6SdcL0zXqDtY081Ws/iJWhr1+xvQg=="], + + "@ffmpeg-installer/linux-arm": ["@ffmpeg-installer/linux-arm@4.1.3", "", { "os": "linux", "cpu": "arm" }, "sha512-NDf5V6l8AfzZ8WzUGZ5mV8O/xMzRag2ETR6+TlGIsMHp81agx51cqpPItXPib/nAZYmo55Bl2L6/WOMI3A5YRg=="], + + "@ffmpeg-installer/linux-arm64": ["@ffmpeg-installer/linux-arm64@4.1.4", "", { "os": "linux", "cpu": "arm64" 
}, "sha512-dljEqAOD0oIM6O6DxBW9US/FkvqvQwgJ2lGHOwHDDwu/pX8+V0YsDL1xqHbj1DMX/+nP9rxw7G7gcUvGspSoKg=="], + + "@ffmpeg-installer/linux-ia32": ["@ffmpeg-installer/linux-ia32@4.1.0", "", { "os": "linux", "cpu": "ia32" }, "sha512-0LWyFQnPf+Ij9GQGD034hS6A90URNu9HCtQ5cTqo5MxOEc7Rd8gLXrJvn++UmxhU0J5RyRE9KRYstdCVUjkNOQ=="], + + "@ffmpeg-installer/linux-x64": ["@ffmpeg-installer/linux-x64@4.1.0", "", { "os": "linux", "cpu": "x64" }, "sha512-Y5BWhGLU/WpQjOArNIgXD3z5mxxdV8c41C+U15nsE5yF8tVcdCGet5zPs5Zy3Ta6bU7haGpIzryutqCGQA/W8A=="], + + "@ffmpeg-installer/win32-ia32": ["@ffmpeg-installer/win32-ia32@4.1.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-FV2D7RlaZv/lrtdhaQ4oETwoFUsUjlUiasiZLDxhEUPdNDWcH1OU9K1xTvqz+OXLdsmYelUDuBS/zkMOTtlUAw=="], + + "@ffmpeg-installer/win32-x64": ["@ffmpeg-installer/win32-x64@4.1.0", "", { "os": "win32", "cpu": "x64" }, "sha512-Drt5u2vzDnIONf4ZEkKtFlbvwj6rI3kxw1Ck9fpudmtgaZIHD4ucsWB2lCZBXRxJgXR+2IMSti+4rtM4C4rXgg=="], + + "@xmldom/xmldom": ["@xmldom/xmldom@0.8.11", "", {}, "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw=="], + + "argparse": ["argparse@1.0.10", "", { "dependencies": { "sprintf-js": "~1.0.2" } }, "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg=="], + + "async": ["async@0.2.10", "", {}, "sha512-eAkdoKxU6/LkKDBzLpT+t6Ff5EtfSF4wx1WfJiPEEV7WNLnDaRXk0oVysiEPm262roaachGexwUv94WhSgN5TQ=="], + + "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], + + "axios": ["axios@1.13.6", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ=="], + + "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], + + "bluebird": ["bluebird@3.4.7", "", 
{}, "sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA=="], + + "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], + + "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], + + "core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="], + + "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="], + + "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], + + "dingbat-to-unicode": ["dingbat-to-unicode@1.0.1", "", {}, "sha512-98l0sW87ZT58pU4i61wa2OHwxbiYSbuxsCBozaVnYX2iCnr3bLM3fIes1/ej7h1YdOKuKt/MLs706TVnALA65w=="], + + "dingtalk-stream": ["dingtalk-stream@2.1.4", "", { "dependencies": { "axios": "^1.4.0", "debug": "^4.3.4", "ws": "^8.13.0" } }, "sha512-rgQbXLGWfASuB9onFcqXTnRSj4ZotimhBOnzrB4kS19AaU9lshXiuofs1GAYcKh5uzPWCAuEs3tMtiadTQWP4A=="], + + "duck": ["duck@0.1.12", "", { "dependencies": { "underscore": "^1.13.1" } }, "sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg=="], + + "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + + "es-define-property": ["es-define-property@1.0.1", "", {}, 
"sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], + + "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + + "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], + + "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="], + + "fluent-ffmpeg": ["fluent-ffmpeg@2.1.3", "", { "dependencies": { "async": "^0.2.9", "which": "^1.1.1" } }, "sha512-Be3narBNt2s6bsaqP6Jzq91heDgOEaDCJAXcE3qcma/EJBSy5FB4cvO31XBInuAuKBx8Kptf8dkhjK0IOru39Q=="], + + "follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="], + + "form-data": ["form-data@4.0.5", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w=="], + + "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], + + "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], + + "get-proto": 
["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + + "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + + "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], + + "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], + + "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + + "immediate": ["immediate@3.0.6", "", {}, "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ=="], + + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + + "isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], + + "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + + "jszip": ["jszip@3.10.1", "", { "dependencies": { "lie": "~3.3.0", "pako": "~1.0.2", "readable-stream": "~2.3.6", "setimmediate": "^1.0.5" } }, "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g=="], + + "lie": ["lie@3.3.0", "", { "dependencies": { "immediate": "~3.0.5" } }, "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ=="], + + "lop": ["lop@0.4.2", "", { "dependencies": { "duck": "^0.1.12", "option": "~0.2.1", "underscore": "^1.13.1" } }, 
"sha512-RefILVDQ4DKoRZsJ4Pj22TxE3omDO47yFpkIBoDKzkqPRISs5U1cnAdg/5583YPkWPaLIYHOKRMQSvjFsO26cw=="], + + "mammoth": ["mammoth@1.11.0", "", { "dependencies": { "@xmldom/xmldom": "^0.8.6", "argparse": "~1.0.3", "base64-js": "^1.5.1", "bluebird": "~3.4.0", "dingbat-to-unicode": "^1.0.1", "jszip": "^3.7.1", "lop": "^0.4.2", "path-is-absolute": "^1.0.0", "underscore": "^1.13.1", "xmlbuilder": "^10.0.0" }, "bin": { "mammoth": "bin/mammoth" } }, "sha512-BcEqqY/BOwIcI1iR5tqyVlqc3KIaMRa4egSoK83YAVrBf6+yqdAAbtUcFDCWX8Zef8/fgNZ6rl4VUv+vVX8ddQ=="], + + "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], + + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], + + "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "node-ensure": ["node-ensure@0.0.0", "", {}, "sha512-DRI60hzo2oKN1ma0ckc6nQWlHU69RH6xN0sjQTjMpChPfTYvKZdcQFfdYK2RWbJcKyUizSIy/l8OTGxMAM1QDw=="], + + "option": ["option@0.2.4", "", {}, "sha512-pkEqbDyl8ou5cpq+VsnQbe/WlEy5qS7xPzMS1U55OCG9KPvwFD46zDbxQIj3egJSFc3D+XhYOPUzz49zQAVy7A=="], + + "pako": ["pako@1.0.11", "", {}, "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="], + + "path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="], + + "pdf-parse": ["pdf-parse@1.1.4", "", { "dependencies": { "node-ensure": "^0.0.0" } }, "sha512-XRIRcLgk6ZnUbsHsYXExMw+krrPE81hJ6FQPLdBNhhBefqIQKXu/WeTgNBGSwPrfU0v+UCEwn7AoAUOsVKHFvQ=="], + + "process-nextick-args": ["process-nextick-args@2.0.1", "", 
{}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="], + + "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], + + "readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], + + "safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], + + "setimmediate": ["setimmediate@1.0.5", "", {}, "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA=="], + + "sprintf-js": ["sprintf-js@1.0.3", "", {}, "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g=="], + + "string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], + + "underscore": ["underscore@1.13.8", "", {}, "sha512-DXtD3ZtEQzc7M8m4cXotyHR+FAS18C64asBYY5vqZexfYryNNnDc02W4hKg3rdQuqOYas1jkseX0+nZXjTXnvQ=="], + + "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], + + "which": ["which@1.3.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "which": "./bin/which" } }, "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ=="], + + "ws": ["ws@8.19.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, 
"sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg=="], + + "xmlbuilder": ["xmlbuilder@10.1.1", "", {}, "sha512-OyzrcFLL/nb6fMGHbiRDuPup9ljBycsdCypwuyg5AAHvyWzGfChJpCXMG88AGTIMFhGZ9RccFN1e6lhg3hkwKg=="], + } +} diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/openclaw.plugin.json b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/openclaw.plugin.json new file mode 100644 index 0000000..29a247e --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/openclaw.plugin.json @@ -0,0 +1,15 @@ +{ + "id": "dingtalk-connector", + "name": "DingTalk Channel", + "version": "0.7.4", + "description": "DingTalk (钉钉) messaging channel via Stream mode with AI Card streaming", + "author": "DingTalk Real Team", + "channels": ["dingtalk-connector"], + "configSchema": { + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { "type": "boolean", "default": true } + } + } +} diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/package.json b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/package.json new file mode 100644 index 0000000..8c54b9a --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/package.json @@ -0,0 +1,60 @@ +{ + "name": "@dingtalk-real-ai/dingtalk-connector", + "version": "0.7.5", + "description": "DingTalk (钉钉) channel connector — Stream mode with AI Card streaming", + "main": "plugin.ts", + "type": "module", + "scripts": { + "build": "echo 'No build needed - jiti loads TS at runtime'", + "lint": "echo 'Lint check skipped'", + "lint:fix": "echo 'Lint fix skipped'", + "test": "echo 'Tests skipped'", + "test:watch": "echo 'Tests skipped'", + "start:runner": "tsx ../runner.ts", + "type-check": "npx tsc --noEmit", + "version:check": "echo 'Version check skipped'", + "release:prepare": "echo 'Release prepare skipped'", + "release:publish": "npm publish --access public", + 
"release:verify": "npm view @dingtalk-real-ai/dingtalk-connector version", + "clean": "rm -rf node_modules package-lock.json", + "install:fresh": "npm run clean && npm install", + "dev": "echo 'Run: openclaw start'", + "validate": "npm run lint && npm run type-check && npm run version:check" + }, + "keywords": [ + "dingtalk", + "channel", + "stream", + "ai-card", + "connector" + ], + "author": "DingTalk Real Team", + "license": "MIT", + "repository": { + "type": "git", + "url": "git+https://github.com/DingTalk-Real-AI/dingtalk-openclaw-connector.git" + }, + "homepage": "https://github.com/DingTalk-Real-AI/dingtalk-openclaw-connector#readme", + "bugs": "https://github.com/DingTalk-Real-AI/dingtalk-openclaw-connector/issues", + "publishConfig": { + "access": "public" + }, + "dependencies": { + "@ffmpeg-installer/ffmpeg": "^1.1.0", + "axios": "1.14.0", + "dingtalk-stream": "^2.1.4", + "fluent-ffmpeg": "^2.1.3", + "mammoth": "^1.8.0", + "pdf-parse": "^1.1.1", + "tsx": "^4.20.5" + }, + "openclaw": { + "extensions": [ + "./plugin.ts" + ], + "channels": [ + "dingtalk-connector" + ], + "installDependencies": true + } +} diff --git a/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/plugin.ts b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/plugin.ts new file mode 100644 index 0000000..51ca417 --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk-openclaw-connector/plugin.ts @@ -0,0 +1,3867 @@ +/** + * DingTalk Channel Plugin for Moltbot + * + * 通过钉钉 Stream 模式连接,支持 AI Card 流式响应。 + * 完整接入 Moltbot 消息处理管道。 + */ + +import { DWClient, TOPIC_ROBOT } from 'dingtalk-stream'; +import axios from 'axios'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; +import type { ClawdbotPluginApi, PluginRuntime, ClawdbotConfig } from 'clawdbot/plugin-sdk'; + +// ============ 常量 ============ + +export const id = 'dingtalk-connector'; + +/** 默认账号 ID,用于标记单账号模式(无 accounts 配置)时的内部标识,映射到 'main' agent */ +const DEFAULT_ACCOUNT_ID = 
'__default__'; + +let runtime: PluginRuntime | null = null; + +function getRuntime(): PluginRuntime { + if (!runtime) throw new Error('DingTalk runtime not initialized'); + return runtime; +} + +// ============ Session 管理 ============ + +/** 用户会话状态:记录最后活跃时间和当前 session 标识 */ +interface UserSession { + lastActivity: number; + sessionId: string; // 格式: dingtalk-connector: 或 dingtalk-connector:: +} + +/** 用户会话缓存 Map */ +const userSessions = new Map(); + +/** 消息去重缓存 Map - 防止同一消息被重复处理 */ +const processedMessages = new Map(); + +/** 消息去重缓存过期时间(5分钟) */ +const MESSAGE_DEDUP_TTL = 5 * 60 * 1000; + +/** 清理过期的消息去重缓存 */ +function cleanupProcessedMessages(): void { + const now = Date.now(); + for (const [msgId, timestamp] of processedMessages.entries()) { + if (now - timestamp > MESSAGE_DEDUP_TTL) { + processedMessages.delete(msgId); + } + } +} + +/** 检查消息是否已处理过(去重) */ +function isMessageProcessed(messageId: string): boolean { + if (!messageId) return false; + return processedMessages.has(messageId); +} + +/** 标记消息为已处理 */ +function markMessageProcessed(messageId: string): void { + if (!messageId) return; + processedMessages.set(messageId, Date.now()); + // 定期清理(每处理100条消息清理一次) + if (processedMessages.size >= 100) { + cleanupProcessedMessages(); + } +} + +/** 新会话触发命令 */ +const NEW_SESSION_COMMANDS = ['/new', '/reset', '/clear', '新会话', '重新开始', '清空对话']; + +/** 检查消息是否是新会话命令 */ +function isNewSessionCommand(text: string): boolean { + const trimmed = text.trim().toLowerCase(); + return NEW_SESSION_COMMANDS.some(cmd => trimmed === cmd.toLowerCase()); +} + +/** + * OpenClaw 标准会话上下文 + * 遵循 OpenClaw session.dmScope 机制,让 Gateway 根据配置自动处理会话隔离 + */ +interface SessionContext { + channel: 'dingtalk-connector'; + accountId: string; + chatType: 'direct' | 'group'; + peerId: string; + conversationId?: string; + senderName?: string; + groupSubject?: string; +} + +/** + * 构建 OpenClaw 标准会话上下文 + * 遵循 OpenClaw session.dmScope 机制,让 Gateway 根据配置自动处理会话隔离 + * + * @param separateSessionByConversation - 
是否按单聊/群聊/群区分 session(默认 true) + * - true: 单聊、群聊、不同群各自拥有独立的 session + * - false: 按用户维度维护 session,不区分单聊/群聊(兼容旧行为) + * @param groupSessionScope - 群聊会话隔离策略(仅当 separateSessionByConversation=true 时生效) + * - 'group': 整个群共享一个会话(默认) + * - 'group_sender': 群内每个用户独立会话 + */ +function buildSessionContext(params: { + accountId: string; + senderId: string; + senderName?: string; + conversationType: string; + conversationId?: string; + groupSubject?: string; + separateSessionByConversation?: boolean; + groupSessionScope?: 'group' | 'group_sender'; +}): SessionContext { + const { accountId, senderId, senderName, conversationType, conversationId, groupSubject, separateSessionByConversation, groupSessionScope } = params; + const isDirect = conversationType === '1'; + + // separateSessionByConversation=false 时,不区分单聊/群聊,按用户维度维护 session + if (separateSessionByConversation === false) { + return { + channel: 'dingtalk-connector', + accountId, + chatType: isDirect ? 'direct' : 'group', + peerId: senderId, // 只用 senderId,不区分会话 + senderName, + }; + } + + // 以下是 separateSessionByConversation=true(默认)的逻辑 + if (isDirect) { + // 单聊:peerId 为发送者 ID,由 OpenClaw Gateway 根据 dmScope 配置处理 + return { + channel: 'dingtalk-connector', + accountId, + chatType: 'direct', + peerId: senderId, + senderName, + }; + } + + // 群聊:根据 groupSessionScope 配置决定会话隔离策略 + if (groupSessionScope === 'group_sender') { + // 群内每个用户独立会话 + return { + channel: 'dingtalk-connector', + accountId, + chatType: 'group', + peerId: `${conversationId}:${senderId}`, + conversationId, + senderName, + groupSubject, + }; + } + + // 默认:整个群共享一个会话 + return { + channel: 'dingtalk-connector', + accountId, + chatType: 'group', + peerId: conversationId || senderId, + conversationId, + senderName, + groupSubject, + }; +} + +// ============ Access Token 缓存 ============ + +let accessToken: string | null = null; +let accessTokenExpiry = 0; + +async function getAccessToken(config: any): Promise { + const now = Date.now(); + if (accessToken && 
accessTokenExpiry > now + 60_000) { + return accessToken; + } + + const response = await axios.post('https://api.dingtalk.com/v1.0/oauth2/accessToken', { + appKey: config.clientId, + appSecret: config.clientSecret, + }); + + accessToken = response.data.accessToken; + accessTokenExpiry = now + (response.data.expireIn * 1000); + return accessToken!; +} + +// ============ 配置工具 ============ + +function getConfig(cfg: ClawdbotConfig) { + return (cfg?.channels as any)?.['dingtalk-connector'] || {}; +} + +function isConfigured(cfg: ClawdbotConfig): boolean { + const config = getConfig(cfg); + return Boolean(config.clientId && config.clientSecret); +} + +// ============ 钉钉图片上传 ============ + +async function getOapiAccessToken(config: any): Promise { + try { + const resp = await axios.get('https://oapi.dingtalk.com/gettoken', { + params: { appkey: config.clientId, appsecret: config.clientSecret }, + }); + if (resp.data?.errcode === 0) return resp.data.access_token; + return null; + } catch { + return null; + } +} + +/** staffId → unionId 缓存 */ +const unionIdCache = new Map(); + +/** + * 通过 oapi 旧版接口将 staffId 转换为 unionId + */ +async function getUnionId(staffId: string, config: any, log?: any): Promise { + const cached = unionIdCache.get(staffId); + if (cached) return cached; + + try { + const token = await getOapiAccessToken(config); + if (!token) { + log?.error?.('[DingTalk] getUnionId: 无法获取 oapi access_token'); + return null; + } + const resp = await axios.get(`${DINGTALK_OAPI}/user/get`, { + params: { access_token: token, userid: staffId }, + timeout: 10_000, + }); + const unionId = resp.data?.unionid; + if (unionId) { + unionIdCache.set(staffId, unionId); + log?.info?.(`[DingTalk] getUnionId: ${staffId} → ${unionId}`); + return unionId; + } + log?.error?.(`[DingTalk] getUnionId: 响应中无 unionid 字段: ${JSON.stringify(resp.data)}`); + return null; + } catch (err: any) { + log?.error?.(`[DingTalk] getUnionId 失败: ${err.message}`); + return null; + } +} + +function 
buildMediaSystemPrompt(): string { + return `## 钉钉图片和文件显示规则 + +你正在钉钉中与用户对话。 + +### 一、图片显示 + +显示图片时,直接使用本地文件路径,系统会自动上传处理。 + +**正确方式**: +\`\`\`markdown +![描述](file:///path/to/image.jpg) +![描述](/tmp/screenshot.png) +![描述](/Users/xxx/photo.jpg) +\`\`\` + +**禁止**: +- 不要自己执行 curl 上传 +- 不要猜测或构造 URL +- **不要对路径进行转义(如使用反斜杠 \\ )** + +直接输出本地路径即可,系统会自动上传到钉钉。 + +### 二、视频分享 + +**何时分享视频**: +- ✅ 用户明确要求**分享、发送、上传**视频时 +- ❌ 仅生成视频保存到本地时,**不需要**分享 + +**视频标记格式**: +当需要分享视频时,在回复**末尾**添加: + +\`\`\` +[DINGTALK_VIDEO]{"path":"<本地视频路径>"}[/DINGTALK_VIDEO] +\`\`\` + +**支持格式**:mp4(最大 20MB) + +**重要**: +- 视频大小不得超过 20MB,超过限制时告知用户 +- 仅支持 mp4 格式 +- 系统会自动提取视频时长、分辨率并生成封面 + +### 三、音频分享 + +**何时分享音频**: +- ✅ 用户明确要求**分享、发送、上传**音频/语音文件时 +- ❌ 仅生成音频保存到本地时,**不需要**分享 + +**音频标记格式**: +当需要分享音频时,在回复**末尾**添加: + +\`\`\` +[DINGTALK_AUDIO]{"path":"<本地音频路径>"}[/DINGTALK_AUDIO] +\`\`\` + +**支持格式**:ogg、amr(最大 20MB) + +**重要**: +- 音频大小不得超过 20MB,超过限制时告知用户 +- 系统会自动提取音频时长 + +### 四、文件分享 + +**何时分享文件**: +- ✅ 用户明确要求**分享、发送、上传**文件时 +- ❌ 仅生成文件保存到本地时,**不需要**分享 + +**文件标记格式**: +当需要分享文件时,在回复**末尾**添加: + +\`\`\` +[DINGTALK_FILE]{"path":"<本地文件路径>","fileName":"<文件名>","fileType":"<扩展名>"}[/DINGTALK_FILE] +\`\`\` + +**支持的文件类型**:几乎所有常见格式 + +**重要**:文件大小不得超过 20MB,超过限制时告知用户文件过大。`; +} + +// ============ 图片后处理:自动上传本地图片到钉钉 ============ + +/** + * 匹配 markdown 图片中的本地文件路径(跨平台): + * - ![alt](file:///path/to/image.jpg) + * - ![alt](MEDIA:/var/folders/xxx.jpg) + * - ![alt](attachment:///path.jpg) + * macOS: + * - ![alt](/tmp/xxx.jpg) + * - ![alt](/var/folders/xxx.jpg) + * - ![alt](/Users/xxx/photo.jpg) + * Linux: + * - ![alt](/home/user/photo.jpg) + * - ![alt](/root/photo.jpg) + * Windows: + * - ![alt](C:\Users\xxx\photo.jpg) + * - ![alt](C:/Users/xxx/photo.jpg) + */ +const LOCAL_IMAGE_RE = /!\[([^\]]*)\]\(((?:file:\/\/\/|MEDIA:|attachment:\/\/\/)[^)]+|\/(?:tmp|var|private|Users|home|root)[^)]+|[A-Za-z]:[\\/ ][^)]+)\)/g; + +/** 图片文件扩展名 */ +const IMAGE_EXTENSIONS = /\.(png|jpg|jpeg|gif|bmp|webp|tiff|svg)$/i; + +/** + * 匹配纯文本中的本地图片路径(不在 markdown 图片语法中,跨平台): + 
* macOS: + * - `/var/folders/.../screenshot.png` + * - `/tmp/image.jpg` + * - `/Users/xxx/photo.png` + * Linux: + * - `/home/user/photo.png` + * - `/root/photo.png` + * Windows: + * - `C:\Users\xxx\photo.png` + * - `C:/temp/image.jpg` + * 支持 backtick 包裹: `path` + */ +const BARE_IMAGE_PATH_RE = /`?((?:\/(?:tmp|var|private|Users|home|root)\/[^\s`'",)]+|[A-Za-z]:[\\/][^\s`'",)]+)\.(?:png|jpg|jpeg|gif|bmp|webp))`?/gi; + +/** 去掉 file:// / MEDIA: / attachment:// 前缀,得到实际的绝对路径 */ +function toLocalPath(raw: string): string { + let path = raw; + if (path.startsWith('file://')) path = path.replace('file://', ''); + else if (path.startsWith('MEDIA:')) path = path.replace('MEDIA:', ''); + else if (path.startsWith('attachment://')) path = path.replace('attachment://', ''); + + // 解码 URL 编码的路径(如中文字符 %E5%9B%BE → 图) + try { + path = decodeURIComponent(path); + } catch { + // 解码失败则保持原样 + } + return path; +} + +/** + * 通用媒体文件上传函数 + * @param filePath 文件路径 + * @param mediaType 媒体类型:image, file, video, voice + * @param oapiToken 钉钉 access_token + * @param maxSize 最大文件大小(字节),默认 20MB + * @param log 日志对象 + * @returns media_id 或 null + */ +async function uploadMediaToDingTalk( + filePath: string, + mediaType: 'image' | 'file' | 'video' | 'voice', + oapiToken: string, + maxSize: number = 20 * 1024 * 1024, + log?: any, +): Promise { + try { + const fs = await import('fs'); + const path = await import('path'); + const FormData = (await import('form-data')).default; + + const absPath = toLocalPath(filePath); + if (!fs.existsSync(absPath)) { + log?.warn?.(`[DingTalk][${mediaType}] 文件不存在: ${absPath}`); + return null; + } + + // 检查文件大小 + const stats = fs.statSync(absPath); + const fileSizeMB = (stats.size / (1024 * 1024)).toFixed(2); + + if (stats.size > maxSize) { + const maxSizeMB = (maxSize / (1024 * 1024)).toFixed(0); + log?.warn?.(`[DingTalk][${mediaType}] 文件过大: ${absPath}, 大小: ${fileSizeMB}MB, 超过限制 ${maxSizeMB}MB`); + return null; + } + + const form = new FormData(); + form.append('media', 
fs.createReadStream(absPath), { + filename: path.basename(absPath), + contentType: mediaType === 'image' ? 'image/jpeg' : 'application/octet-stream', + }); + + log?.info?.(`[DingTalk][${mediaType}] 上传文件: ${absPath} (${fileSizeMB}MB)`); + const resp = await axios.post( + `https://oapi.dingtalk.com/media/upload?access_token=${oapiToken}&type=${mediaType}`, + form, + { headers: form.getHeaders(), timeout: 60_000 }, + ); + + const mediaId = resp.data?.media_id; + if (mediaId) { + log?.info?.(`[DingTalk][${mediaType}] 上传成功: media_id=${mediaId}`); + return mediaId; + } + log?.warn?.(`[DingTalk][${mediaType}] 上传返回无 media_id: ${JSON.stringify(resp.data)}`); + return null; + } catch (err: any) { + log?.error?.(`[DingTalk][${mediaType}] 上传失败: ${err.message}`); + return null; + } +} + +/** 扫描内容中的本地图片路径,上传到钉钉并替换为 media_id */ +async function processLocalImages( + content: string, + oapiToken: string | null, + log?: any, +): Promise { + if (!oapiToken) { + log?.warn?.(`[DingTalk][Media] 无 oapiToken,跳过图片后处理`); + return content; + } + + let result = content; + + // 第一步:匹配 markdown 图片语法 ![alt](path) + const mdMatches = [...content.matchAll(LOCAL_IMAGE_RE)]; + if (mdMatches.length > 0) { + log?.info?.(`[DingTalk][Media] 检测到 ${mdMatches.length} 个 markdown 图片,开始上传...`); + for (const match of mdMatches) { + const [fullMatch, alt, rawPath] = match; + // 清理转义字符(AI 可能会对含空格的路径添加 \ ) + const cleanPath = rawPath.replace(/\\ /g, ' '); + const mediaId = await uploadMediaToDingTalk(cleanPath, 'image', oapiToken, 20 * 1024 * 1024, log); + if (mediaId) { + result = result.replace(fullMatch, `![${alt}](${mediaId})`); + } + } + } + + // 第二步:匹配纯文本中的本地图片路径(如 `/var/folders/.../xxx.png`) + // 排除已被 markdown 图片语法包裹的路径 + const bareMatches = [...result.matchAll(BARE_IMAGE_PATH_RE)]; + const newBareMatches = bareMatches.filter(m => { + // 检查这个路径是否已经在 ![...](...) 
中 + const idx = m.index!; + const before = result.slice(Math.max(0, idx - 10), idx); + return !before.includes(']('); + }); + + if (newBareMatches.length > 0) { + log?.info?.(`[DingTalk][Media] 检测到 ${newBareMatches.length} 个纯文本图片路径,开始上传...`); + // 从后往前替换,避免 index 偏移 + for (const match of newBareMatches.reverse()) { + const [fullMatch, rawPath] = match; + log?.info?.(`[DingTalk][Media] 纯文本图片: "${fullMatch}" -> path="${rawPath}"`); + const mediaId = await uploadMediaToDingTalk(rawPath, 'image', oapiToken, 20 * 1024 * 1024, log); + if (mediaId) { + const replacement = `![](${mediaId})`; + result = result.slice(0, match.index!) + result.slice(match.index!).replace(fullMatch, replacement); + log?.info?.(`[DingTalk][Media] 替换纯文本路径为图片: ${replacement}`); + } + } + } + + if (mdMatches.length === 0 && newBareMatches.length === 0) { + log?.info?.(`[DingTalk][Media] 未检测到本地图片路径`); + } + + return result; +} + +// ============ 文件后处理:提取文件标记并发送独立消息 ============ + +/** + * 文件标记正则:[DINGTALK_FILE]{"path":"...","fileName":"...","fileType":"..."}[/DINGTALK_FILE] + */ +const FILE_MARKER_PATTERN = /\[DINGTALK_FILE\]({.*?})\[\/DINGTALK_FILE\]/g; + +/** 视频大小限制:20MB */ +const MAX_VIDEO_SIZE = 20 * 1024 * 1024; + +// ============ 视频后处理:提取视频标记并发送视频消息 ============ + +/** + * 视频标记正则:[DINGTALK_VIDEO]{"path":"..."}[/DINGTALK_VIDEO] + */ +const VIDEO_MARKER_PATTERN = /\[DINGTALK_VIDEO\]({.*?})\[\/DINGTALK_VIDEO\]/g; + +/** + * 音频标记正则:[DINGTALK_AUDIO]{"path":"..."}[/DINGTALK_AUDIO] + */ +const AUDIO_MARKER_PATTERN = /\[DINGTALK_AUDIO\]({.*?})\[\/DINGTALK_AUDIO\]/g; + +/** 视频信息接口 */ +interface VideoInfo { + path: string; +} + +/** 视频元数据接口 */ +interface VideoMetadata { + duration: number; + width: number; + height: number; +} + +/** + * 提取视频元数据(时长、分辨率) + */ +async function extractVideoMetadata( + filePath: string, + log?: any, +): Promise { + try { + const ffmpeg = require('fluent-ffmpeg'); + const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path; + ffmpeg.setFfmpegPath(ffmpegPath); + + return 
new Promise((resolve, reject) => { + ffmpeg.ffprobe(filePath, (err: any, metadata: any) => { + if (err) { + log?.error?.(`[DingTalk][Video] 提取元数据失败: ${err.message}`); + return reject(err); + } + + const videoStream = metadata.streams.find((s: any) => s.codec_type === 'video'); + if (!videoStream) { + log?.warn?.(`[DingTalk][Video] 未找到视频流`); + return resolve(null); + } + + const result = { + duration: Math.floor(metadata.format.duration || 0), + width: videoStream.width || 0, + height: videoStream.height || 0, + }; + + log?.info?.(`[DingTalk][Video] 元数据: duration=${result.duration}s, ${result.width}x${result.height}`); + resolve(result); + }); + }); + } catch (err: any) { + log?.error?.(`[DingTalk][Video] ffprobe 失败: ${err.message}`); + return null; + } +} + +/** + * 生成视频封面图(第1秒截图) + */ +async function extractVideoThumbnail( + videoPath: string, + outputPath: string, + log?: any, +): Promise { + try { + const ffmpeg = require('fluent-ffmpeg'); + const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path; + const path = await import('path'); + ffmpeg.setFfmpegPath(ffmpegPath); + + return new Promise((resolve, reject) => { + ffmpeg(videoPath) + .screenshots({ + count: 1, + folder: path.dirname(outputPath), + filename: path.basename(outputPath), + timemarks: ['1'], + size: '?x360', + }) + .on('end', () => { + log?.info?.(`[DingTalk][Video] 封面生成成功: ${outputPath}`); + resolve(outputPath); + }) + .on('error', (err: any) => { + log?.error?.(`[DingTalk][Video] 封面生成失败: ${err.message}`); + reject(err); + }); + }); + } catch (err: any) { + log?.error?.(`[DingTalk][Video] ffmpeg 失败: ${err.message}`); + return null; + } +} + +/** + * 发送视频消息到钉钉 + */ +async function sendVideoMessage( + config: any, + sessionWebhook: string, + videoInfo: VideoInfo, + videoMediaId: string, + picMediaId: string, + metadata: VideoMetadata, + oapiToken: string, + log?: any, +): Promise { + try { + const path = await import('path'); + const fileName = path.basename(videoInfo.path); + + const payload = 
{ + msgtype: 'video', + video: { + duration: metadata.duration.toString(), + videoMediaId: videoMediaId, + videoType: 'mp4', + picMediaId: picMediaId, + }, + }; + + log?.info?.(`[DingTalk][Video] 发送视频消息: ${fileName}, payload: ${JSON.stringify(payload)}`); + const resp = await axios.post(sessionWebhook, payload, { + headers: { + 'x-acs-dingtalk-access-token': oapiToken, + 'Content-Type': 'application/json', + }, + timeout: 10_000, + }); + + if (resp.data?.success !== false) { + log?.info?.(`[DingTalk][Video] 视频消息发送成功: ${fileName}`); + } else { + log?.error?.(`[DingTalk][Video] 视频消息发送失败: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][Video] 发送失败: ${err.message}`); + } +} + +/** + * 视频后处理主函数 + * 返回移除标记后的内容,并附带视频处理的状态提示 + * + * @param useProactiveApi 是否使用主动消息 API(用于 AI Card 场景) + * @param target 主动 API 需要的目标信息(useProactiveApi=true 时必须提供) + */ +async function processVideoMarkers( + content: string, + sessionWebhook: string, + config: any, + oapiToken: string | null, + log?: any, + useProactiveApi: boolean = false, + target?: AICardTarget, +): Promise { + const logPrefix = useProactiveApi ? 
'[DingTalk][Video][Proactive]' : '[DingTalk][Video]'; + + if (!oapiToken) { + log?.warn?.(`${logPrefix} 无 oapiToken,跳过视频处理`); + return content; + } + + const fs = await import('fs'); + const path = await import('path'); + const os = await import('os'); + + // 提取视频标记 + const matches = [...content.matchAll(VIDEO_MARKER_PATTERN)]; + const videoInfos: VideoInfo[] = []; + const invalidVideos: string[] = []; + + for (const match of matches) { + try { + const videoInfo = JSON.parse(match[1]) as VideoInfo; + if (videoInfo.path && fs.existsSync(videoInfo.path)) { + videoInfos.push(videoInfo); + log?.info?.(`${logPrefix} 提取到视频: ${videoInfo.path}`); + } else { + invalidVideos.push(videoInfo.path || '未知路径'); + log?.warn?.(`${logPrefix} 视频文件不存在: ${videoInfo.path}`); + } + } catch (err: any) { + log?.warn?.(`${logPrefix} 解析标记失败: ${err.message}`); + } + } + + if (videoInfos.length === 0 && invalidVideos.length === 0) { + log?.info?.(`${logPrefix} 未检测到视频标记`); + return content.replace(VIDEO_MARKER_PATTERN, '').trim(); + } + + // 先移除所有视频标记,保留其他文本内容 + let cleanedContent = content.replace(VIDEO_MARKER_PATTERN, '').trim(); + + // 收集处理结果状态 + const statusMessages: string[] = []; + + // 处理无效视频 + for (const invalidPath of invalidVideos) { + statusMessages.push(`⚠️ 视频文件不存在: ${path.basename(invalidPath)}`); + } + + if (videoInfos.length > 0) { + log?.info?.(`${logPrefix} 检测到 ${videoInfos.length} 个视频,开始处理...`); + } + + // 逐个处理视频 + for (const videoInfo of videoInfos) { + const fileName = path.basename(videoInfo.path); + let thumbnailPath = ''; + try { + // 1. 提取元数据 + const metadata = await extractVideoMetadata(videoInfo.path, log); + if (!metadata) { + log?.warn?.(`${logPrefix} 无法提取元数据: ${videoInfo.path}`); + statusMessages.push(`⚠️ 视频处理失败: ${fileName}(无法读取视频信息,请检查 ffmpeg 是否已安装)`); + continue; + } + + // 2. 
生成封面 + thumbnailPath = path.join(os.tmpdir(), `thumbnail_${Date.now()}.jpg`); + const thumbnail = await extractVideoThumbnail(videoInfo.path, thumbnailPath, log); + if (!thumbnail) { + log?.warn?.(`${logPrefix} 无法生成封面: ${videoInfo.path}`); + statusMessages.push(`⚠️ 视频处理失败: ${fileName}(无法生成封面)`); + continue; + } + + // 3. 上传视频 + const videoMediaId = await uploadMediaToDingTalk(videoInfo.path, 'video', oapiToken, MAX_VIDEO_SIZE, log); + if (!videoMediaId) { + log?.warn?.(`${logPrefix} 视频上传失败: ${videoInfo.path}`); + statusMessages.push(`⚠️ 视频上传失败: ${fileName}(文件可能超过 20MB 限制)`); + continue; + } + + // 4. 上传封面 + const picMediaId = await uploadMediaToDingTalk(thumbnailPath, 'image', oapiToken, 20 * 1024 * 1024, log); + if (!picMediaId) { + log?.warn?.(`${logPrefix} 封面上传失败: ${thumbnailPath}`); + statusMessages.push(`⚠️ 视频封面上传失败: ${fileName}`); + continue; + } + + // 5. 发送视频消息 + if (useProactiveApi && target) { + await sendVideoProactive(config, target, videoMediaId, picMediaId, metadata, log); + } else { + await sendVideoMessage(config, sessionWebhook, videoInfo, videoMediaId, picMediaId, metadata, oapiToken, log); + } + + log?.info?.(`${logPrefix} 视频处理完成: ${fileName}`); + statusMessages.push(`✅ 视频已发送: ${fileName}`); + } catch (err: any) { + log?.error?.(`${logPrefix} 处理视频失败: ${err.message}`); + statusMessages.push(`⚠️ 视频处理异常: ${fileName}(${err.message})`); + } finally { + // 统一清理临时文件 + if (thumbnailPath) { + try { + fs.unlinkSync(thumbnailPath); + } catch { + // 文件可能不存在,忽略删除错误 + } + } + } + } + + // 将状态信息附加到清理后的内容 + if (statusMessages.length > 0) { + const statusText = statusMessages.join('\n'); + cleanedContent = cleanedContent + ? 
`${cleanedContent}\n\n${statusText}` + : statusText; + } + + return cleanedContent; +} + +/** 音频文件扩展名 */ +const AUDIO_EXTENSIONS = ['mp3', 'wav', 'amr', 'ogg', 'aac', 'flac', 'm4a']; + + +/** 判断是否为音频文件 */ +function isAudioFile(fileType: string): boolean { + return AUDIO_EXTENSIONS.includes(fileType.toLowerCase()); +} + +/** 文件大小限制:20MB(字节) */ +const MAX_FILE_SIZE = 20 * 1024 * 1024; + +/** 文件信息接口 */ +interface FileInfo { + path: string; // 本地文件路径 + fileName: string; // 文件名 + fileType: string; // 文件类型(扩展名) +} + +/** + * 从内容中提取文件标记 + * @returns { cleanedContent, fileInfos } + */ +function extractFileMarkers(content: string, log?: any): { cleanedContent: string; fileInfos: FileInfo[] } { + const fileInfos: FileInfo[] = []; + const matches = [...content.matchAll(FILE_MARKER_PATTERN)]; + + for (const match of matches) { + try { + const fileInfo = JSON.parse(match[1]) as FileInfo; + + // 验证必需字段 + if (fileInfo.path && fileInfo.fileName) { + fileInfos.push(fileInfo); + log?.info?.(`[DingTalk][File] 提取到文件标记: ${fileInfo.fileName}`); + } + } catch (err: any) { + log?.warn?.(`[DingTalk][File] 解析文件标记失败: ${match[1]}, 错误: ${err.message}`); + } + } + + // 移除文件标记,返回清理后的内容 + const cleanedContent = content.replace(FILE_MARKER_PATTERN, '').trim(); + return { cleanedContent, fileInfos }; +} + + +/** + * 发送文件消息到钉钉 + */ +async function sendFileMessage( + config: any, + sessionWebhook: string, + fileInfo: FileInfo, + mediaId: string, + oapiToken: string, + log?: any, +): Promise { + try { + const fileMessage = { + msgtype: 'file', + file: { + mediaId: mediaId, + fileName: fileInfo.fileName, + fileType: fileInfo.fileType, + }, + }; + + log?.info?.(`[DingTalk][File] 发送文件消息: ${fileInfo.fileName}`); + const resp = await axios.post(sessionWebhook, fileMessage, { + headers: { + 'x-acs-dingtalk-access-token': oapiToken, + 'Content-Type': 'application/json', + }, + timeout: 10_000, + }); + + if (resp.data?.success !== false) { + log?.info?.(`[DingTalk][File] 文件消息发送成功: ${fileInfo.fileName}`); + } 
else { + log?.error?.(`[DingTalk][File] 文件消息发送失败: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][File] 发送文件消息异常: ${fileInfo.fileName}, 错误: ${err.message}`); + } +} + +/** + * 获取 ffprobe 可执行文件路径 + * 优先级: @ffprobe-installer/ffprobe > FFPROBE_PATH 环境变量 > 系统 PATH + */ +function getFfprobePath(): string { + // 1. 尝试 @ffprobe-installer/ffprobe 包 + try { + const ffprobePath = require('@ffprobe-installer/ffprobe').path; + if (ffprobePath) return ffprobePath; + } catch { /* 未安装,跳过 */ } + + // 2. 尝试环境变量 + if (process.env.FFPROBE_PATH) return process.env.FFPROBE_PATH; + + // 3. fallback 到系统 PATH + return 'ffprobe'; +} + +/** + * 提取音频文件时长(毫秒) + * 使用 ffprobe CLI 直接获取,避免 fluent-ffmpeg 在部分运行环境中回调不触发的问题 + */ +async function extractAudioDuration( + filePath: string, + log?: any, +): Promise { + try { + const { execFile } = require('child_process'); + const ffprobeBin = getFfprobePath(); + + return new Promise((resolve) => { + execFile(ffprobeBin, [ + '-v', 'quiet', + '-print_format', 'json', + '-show_format', + filePath, + ], { timeout: 10_000 }, (err: any, stdout: string, stderr: string) => { + if (err) { + log?.error?.(`[DingTalk][Audio] ffprobe 执行失败 (${ffprobeBin}): ${err.message}`); + return resolve(null); + } + + try { + const parsed = JSON.parse(stdout); + const durationSec = parseFloat(parsed?.format?.duration); + if (isNaN(durationSec)) { + log?.warn?.(`[DingTalk][Audio] 无法解析音频时长,ffprobe 输出: ${stdout.slice(0, 200)}`); + return resolve(null); + } + + const durationMs = Math.floor(durationSec * 1000); + log?.info?.(`[DingTalk][Audio] 音频时长: ${durationMs}ms (${durationSec}s)`); + resolve(durationMs); + } catch (parseErr: any) { + log?.error?.(`[DingTalk][Audio] ffprobe 输出解析失败: ${parseErr.message}`); + resolve(null); + } + }); + }); + } catch (err: any) { + log?.error?.(`[DingTalk][Audio] extractAudioDuration 异常: ${err.message}`); + return null; + } +} + +/** + * 发送音频消息到钉钉(被动回复场景) + */ +async function sendAudioMessage( + config: any, + 
sessionWebhook: string, + fileInfo: FileInfo, + mediaId: string, + oapiToken: string, + log?: any, + durationMs?: number, +): Promise { + try { + // 钉钉语音消息格式 + const actualDuration = (durationMs && durationMs > 0) ? durationMs.toString() : '60000'; + const audioMessage = { + msgtype: 'voice', + voice: { + mediaId: mediaId, + duration: actualDuration, + }, + }; + + log?.info?.(`[DingTalk][Audio] 发送语音消息: ${fileInfo.fileName}`); + const resp = await axios.post(sessionWebhook, audioMessage, { + headers: { + 'x-acs-dingtalk-access-token': oapiToken, + 'Content-Type': 'application/json', + }, + timeout: 10_000, + }); + + if (resp.data?.success !== false) { + log?.info?.(`[DingTalk][Audio] 语音消息发送成功: ${fileInfo.fileName}`); + } else { + log?.error?.(`[DingTalk][Audio] 语音消息发送失败: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][Audio] 发送语音消息异常: ${fileInfo.fileName}, 错误: ${err.message}`); + } +} + +/** + * 处理文件标记:提取、上传、发送独立消息 + * 返回移除标记后的内容,并附带文件处理的状态提示 + * + * @param useProactiveApi 是否使用主动消息 API(用于 AI Card 场景,避免 sessionWebhook 失效问题) + * @param target 主动 API 需要的目标信息(useProactiveApi=true 时必须提供) + */ +async function processFileMarkers( + content: string, + sessionWebhook: string, + config: any, + oapiToken: string | null, + log?: any, + useProactiveApi: boolean = false, + target?: AICardTarget, +): Promise { + if (!oapiToken) { + log?.warn?.(`[DingTalk][File] 无 oapiToken,跳过文件处理`); + return content; + } + + const { cleanedContent, fileInfos } = extractFileMarkers(content, log); + + if (fileInfos.length === 0) { + log?.info?.(`[DingTalk][File] 未检测到文件标记`); + return cleanedContent; + } + + log?.info?.(`[DingTalk][File] 检测到 ${fileInfos.length} 个文件标记,开始处理... 
(useProactiveApi=${useProactiveApi})`); + + const statusMessages: string[] = []; + + const fs = await import('fs'); + + // 逐个上传并发送文件消息 + for (const fileInfo of fileInfos) { + // 预检查:文件是否存在、是否超限 + const absPath = toLocalPath(fileInfo.path); + if (!fs.existsSync(absPath)) { + statusMessages.push(`⚠️ 文件不存在: ${fileInfo.fileName}`); + continue; + } + const stats = fs.statSync(absPath); + if (stats.size > MAX_FILE_SIZE) { + const sizeMB = (stats.size / (1024 * 1024)).toFixed(1); + const maxMB = (MAX_FILE_SIZE / (1024 * 1024)).toFixed(0); + statusMessages.push(`⚠️ 文件过大无法发送: ${fileInfo.fileName}(${sizeMB}MB,限制 ${maxMB}MB)`); + continue; + } + + // 区分音频文件和普通文件 + if (isAudioFile(fileInfo.fileType)) { + // 音频文件使用 voice 类型上传 + const mediaId = await uploadMediaToDingTalk(fileInfo.path, 'voice', oapiToken, MAX_FILE_SIZE, log); + if (mediaId) { + // 提取音频实际时长 + const audioDurationMs = await extractAudioDuration(fileInfo.path, log); + if (useProactiveApi && target) { + // 使用主动消息 API(适用于 AI Card 场景) + await sendAudioProactive(config, target, fileInfo, mediaId, log, audioDurationMs ?? undefined); + } else { + // 使用 sessionWebhook(传统被动回复场景) + await sendAudioMessage(config, sessionWebhook, fileInfo, mediaId, oapiToken, log, audioDurationMs ?? 
undefined); + } + statusMessages.push(`✅ 音频已发送: ${fileInfo.fileName}`); + } else { + log?.error?.(`[DingTalk][Audio] 音频上传失败,跳过发送: ${fileInfo.fileName}`); + statusMessages.push(`⚠️ 音频上传失败: ${fileInfo.fileName}`); + } + } else { + // 普通文件 + const mediaId = await uploadMediaToDingTalk(fileInfo.path, 'file', oapiToken, MAX_FILE_SIZE, log); + if (mediaId) { + if (useProactiveApi && target) { + // 使用主动消息 API(适用于 AI Card 场景) + await sendFileProactive(config, target, fileInfo, mediaId, log); + } else { + // 使用 sessionWebhook(传统被动回复场景) + await sendFileMessage(config, sessionWebhook, fileInfo, mediaId, oapiToken, log); + } + statusMessages.push(`✅ 文件已发送: ${fileInfo.fileName}`); + } else { + log?.error?.(`[DingTalk][File] 文件上传失败,跳过发送: ${fileInfo.fileName}`); + statusMessages.push(`⚠️ 文件上传失败: ${fileInfo.fileName}`); + } + } + } + + // 将状态信息附加到清理后的内容 + if (statusMessages.length > 0) { + const statusText = statusMessages.join('\n'); + return cleanedContent + ? `${cleanedContent}\n\n${statusText}` + : statusText; + } + + return cleanedContent; +} + +// ============ AI Card Streaming ============ + +const DINGTALK_API = 'https://api.dingtalk.com'; +const DINGTALK_OAPI = 'https://oapi.dingtalk.com'; +const AI_CARD_TEMPLATE_ID = '382e4302-551d-4880-bf29-a30acfab2e71.schema'; + +// flowStatus 值与 Python SDK AICardStatus 一致(cardParamMap 的值必须是字符串) +const AICardStatus = { + PROCESSING: '1', + INPUTING: '2', + FINISHED: '3', + EXECUTING: '4', + FAILED: '5', +} as const; + +interface AICardInstance { + cardInstanceId: string; + accessToken: string; + inputingStarted: boolean; +} + +/** + * 创建 AI Card 实例(被动回复场景) + * 从钉钉回调 data 中提取目标信息,委托给通用函数 + */ +async function createAICard( + config: any, + data: any, + log?: any, +): Promise { + const isGroup = data.conversationType === '2'; + + log?.info?.(`[DingTalk][AICard] conversationType=${data.conversationType}, conversationId=${data.conversationId}, senderStaffId=${data.senderStaffId}, senderId=${data.senderId}`); + + // 构建通用目标 + const target: 
AICardTarget = isGroup + ? { type: 'group', openConversationId: data.conversationId } + : { type: 'user', userId: data.senderStaffId || data.senderId }; + + return createAICardForTarget(config, target, log); +} + +// 流式更新 AI Card 内容 +async function streamAICard( + card: AICardInstance, + content: string, + finished: boolean = false, + log?: any, +): Promise { + // 首次 streaming 前,先切换到 INPUTING 状态(与 Python SDK get_card_data(INPUTING) 一致) + if (!card.inputingStarted) { + const statusBody = { + outTrackId: card.cardInstanceId, + cardData: { + cardParamMap: { + flowStatus: AICardStatus.INPUTING, + msgContent: '', + staticMsgContent: '', + sys_full_json_obj: JSON.stringify({ + order: ['msgContent'], // 只声明实际使用的字段,避免部分客户端显示空占位 + }), + }, + }, + }; + log?.info?.(`[DingTalk][AICard] PUT /v1.0/card/instances (INPUTING) outTrackId=${card.cardInstanceId}`); + try { + const statusResp = await axios.put(`${DINGTALK_API}/v1.0/card/instances`, statusBody, { + headers: { 'x-acs-dingtalk-access-token': card.accessToken, 'Content-Type': 'application/json' }, + }); + log?.info?.(`[DingTalk][AICard] INPUTING 响应: status=${statusResp.status} data=${JSON.stringify(statusResp.data)}`); + } catch (err: any) { + log?.error?.(`[DingTalk][AICard] INPUTING 切换失败: ${err.message}, resp=${JSON.stringify(err.response?.data)}`); + throw err; + } + card.inputingStarted = true; + } + + // 调用 streaming API 更新内容 + const body = { + outTrackId: card.cardInstanceId, + guid: `${Date.now()}_${Math.random().toString(36).slice(2, 8)}`, + key: 'msgContent', + content: content, + isFull: true, // 全量替换 + isFinalize: finished, + isError: false, + }; + + log?.info?.(`[DingTalk][AICard] PUT /v1.0/card/streaming contentLen=${content.length} isFinalize=${finished} guid=${body.guid}`); + try { + const streamResp = await axios.put(`${DINGTALK_API}/v1.0/card/streaming`, body, { + headers: { 'x-acs-dingtalk-access-token': card.accessToken, 'Content-Type': 'application/json' }, + }); + log?.info?.(`[DingTalk][AICard] 
streaming 响应: status=${streamResp.status}`); + } catch (err: any) { + log?.error?.(`[DingTalk][AICard] streaming 更新失败: ${err.message}, resp=${JSON.stringify(err.response?.data)}`); + throw err; + } +} + +// 完成 AI Card:先 streaming isFinalize 关闭流式通道,再 put_card_data 更新 FINISHED 状态 +async function finishAICard( + card: AICardInstance, + content: string, + log?: any, +): Promise { + log?.info?.(`[DingTalk][AICard] 开始 finish,最终内容长度=${content.length}`); + + // 1. 先用最终内容关闭流式通道(isFinalize=true),确保卡片显示替换后的内容 + await streamAICard(card, content, true, log); + + // 2. 更新卡片状态为 FINISHED + const body = { + outTrackId: card.cardInstanceId, + cardData: { + cardParamMap: { + flowStatus: AICardStatus.FINISHED, + msgContent: content, + staticMsgContent: '', + sys_full_json_obj: JSON.stringify({ + order: ['msgContent'], // 只声明实际使用的字段,避免部分客户端显示空占位 + }), + }, + }, + }; + + log?.info?.(`[DingTalk][AICard] PUT /v1.0/card/instances (FINISHED) outTrackId=${card.cardInstanceId}`); + try { + const finishResp = await axios.put(`${DINGTALK_API}/v1.0/card/instances`, body, { + headers: { 'x-acs-dingtalk-access-token': card.accessToken, 'Content-Type': 'application/json' }, + }); + log?.info?.(`[DingTalk][AICard] FINISHED 响应: status=${finishResp.status} data=${JSON.stringify(finishResp.data)}`); + } catch (err: any) { + log?.error?.(`[DingTalk][AICard] FINISHED 更新失败: ${err.message}, resp=${JSON.stringify(err.response?.data)}`); + } +} + +// ============ Gateway SSE Streaming ============ + +// ============ Bindings 匹配逻辑 ============ + +interface BindingMatch { + channel?: string; + accountId?: string; + peer?: { + kind?: 'direct' | 'group'; + id?: string; + }; +} + +interface Binding { + agentId: string; + match?: BindingMatch; +} + +/** + * 根据 OpenClaw bindings 配置解析 agentId + * + * 匹配优先级(从高到低): + * 1. peer.kind + peer.id 精确匹配(非 '*') + * 2. peer.kind + peer.id='*' 通配匹配 + * 3. peer.kind 匹配(无 peer.id) + * 4. accountId 匹配 + * 5. channel 匹配 + * 6. 
默认 fallback + * + * @param accountId 账号 ID + * @param peerKind 会话类型:'direct'(单聊)或 'group'(群聊) + * @param peerId 发送者 ID(单聊)或会话 ID(群聊) + * @param log 日志对象 + * @returns 匹配到的 agentId + */ +function resolveAgentIdByBindings( + accountId: string, + peerKind: 'direct' | 'group', + peerId: string, + log?: any, +): string { + const rt = getRuntime(); + const defaultAgentId = accountId === DEFAULT_ACCOUNT_ID ? 'main' : accountId; + + // 读取 OpenClaw 配置 + let bindings: Binding[] = []; + try { + const configPath = path.join(os.homedir(), '.openclaw', 'openclaw.json'); + if (fs.existsSync(configPath)) { + const configContent = fs.readFileSync(configPath, 'utf-8'); + const config = JSON.parse(configContent); + bindings = config.bindings || []; + } + } catch (err: any) { + log?.warn?.(`[DingTalk][Bindings] 读取 OpenClaw 配置失败: ${err.message}`); + return defaultAgentId; + } + + if (bindings.length === 0) { + log?.info?.(`[DingTalk][Bindings] 无 bindings 配置,使用默认 agentId=${defaultAgentId}`); + return defaultAgentId; + } + + // 筛选 channel='dingtalk-connector' 的 bindings + const channelBindings = bindings.filter(b => + !b.match?.channel || b.match.channel === 'dingtalk-connector' + ); + + if (channelBindings.length === 0) { + log?.info?.(`[DingTalk][Bindings] 无匹配 channel 的 bindings,使用默认 agentId=${defaultAgentId}`); + return defaultAgentId; + } + + log?.info?.(`[DingTalk][Bindings] 开始匹配: accountId=${accountId}, peerKind=${peerKind}, peerId=${peerId}, bindings数量=${channelBindings.length}`); + + // 按优先级匹配 + // 优先级1: peer.kind + peer.id 精确匹配 + for (const binding of channelBindings) { + const match = binding.match || {}; + if (match.peer?.kind === peerKind && + match.peer?.id && + match.peer.id !== '*' && + match.peer.id === peerId) { + // 还需检查 accountId 是否匹配(如果指定了) + if (match.accountId && match.accountId !== accountId) continue; + log?.info?.(`[DingTalk][Bindings] 精确匹配 peer.id: agentId=${binding.agentId}`); + return binding.agentId || defaultAgentId; + } + } + + // 优先级2: peer.kind + 
peer.id='*' 通配匹配 + for (const binding of channelBindings) { + const match = binding.match || {}; + if (match.peer?.kind === peerKind && match.peer?.id === '*') { + if (match.accountId && match.accountId !== accountId) continue; + log?.info?.(`[DingTalk][Bindings] 通配匹配 peer.kind=${peerKind}, peer.id=*: agentId=${binding.agentId}`); + return binding.agentId || defaultAgentId; + } + } + + // 优先级3: 仅 peer.kind 匹配(无 peer.id) + for (const binding of channelBindings) { + const match = binding.match || {}; + if (match.peer?.kind === peerKind && !match.peer?.id) { + if (match.accountId && match.accountId !== accountId) continue; + log?.info?.(`[DingTalk][Bindings] 匹配 peer.kind=${peerKind}: agentId=${binding.agentId}`); + return binding.agentId || defaultAgentId; + } + } + + // 优先级4: accountId 匹配(无 peer 配置) + for (const binding of channelBindings) { + const match = binding.match || {}; + if (!match.peer && match.accountId === accountId) { + log?.info?.(`[DingTalk][Bindings] 匹配 accountId=${accountId}: agentId=${binding.agentId}`); + return binding.agentId || defaultAgentId; + } + } + + // 优先级5: 仅 channel 匹配(无 peer 和 accountId) + for (const binding of channelBindings) { + const match = binding.match || {}; + if (!match.peer && !match.accountId) { + log?.info?.(`[DingTalk][Bindings] 匹配 channel=dingtalk-connector: agentId=${binding.agentId}`); + return binding.agentId || defaultAgentId; + } + } + + log?.info?.(`[DingTalk][Bindings] 无匹配,使用默认 agentId=${defaultAgentId}`); + return defaultAgentId; +} + +interface GatewayOptions { + userContent: string; + systemPrompts: string[]; + sessionContext: SessionContext; + gatewayAuth?: string; // token 或 password,都用 Bearer 格式 + /** 记忆归属用户标识,用于 Gateway 区分记忆;sharedMemoryAcrossConversations=true 时传 accountId,false 时传 sessionContext JSON */ + memoryUser?: string; + /** 本地图片文件路径列表,用于 OpenClaw AgentMediaPayload */ + imageLocalPaths?: string[]; + /** 会话类型:'direct'(单聊)或 'group'(群聊),用于 bindings 匹配 */ + peerKind?: 'direct' | 'group'; + /** 发送者 ID,用于 
bindings 匹配 */ + peerId?: string; + gatewayPort?: number; + log?: any; +} + +async function* streamFromGateway(options: GatewayOptions, accountId: string): AsyncGenerator { + const { userContent, systemPrompts, sessionKey, gatewayAuth, memoryUser, imageLocalPaths, peerKind, peerId, gatewayPort, log } = options; + const rt = getRuntime(); + const port = gatewayPort || rt.gateway?.port || 18789; + const gatewayUrl = `http://127.0.0.1:${port}/v1/chat/completions`; + + const messages: any[] = []; + for (const prompt of systemPrompts) { + messages.push({ role: 'system', content: prompt }); + } + + // 如果有图片,在文本中嵌入本地文件路径(OpenClaw AgentMediaPayload 格式) + let finalContent = userContent; + if (imageLocalPaths && imageLocalPaths.length > 0) { + const imageMarkdown = imageLocalPaths.map(p => `![image](file://${p})`).join('\n'); + finalContent = finalContent ? `${finalContent}\n\n${imageMarkdown}` : imageMarkdown; + log?.info?.(`[DingTalk][Gateway] 附加 ${imageLocalPaths.length} 张本地图片路径`); + } + messages.push({ role: 'user', content: finalContent }); + + const headers: Record = { 'Content-Type': 'application/json' }; + if (gatewayAuth) { + headers['Authorization'] = `Bearer ${gatewayAuth}`; + } + // 使用 bindings 配置解析 agentId,支持基于 peer.kind(单聊/群聊)的路由 + // 如果没有提供 peerKind/peerId,则回退到原有逻辑 + const agentId = (peerKind && peerId) + ? resolveAgentIdByBindings(accountId, peerKind, peerId, log) + : (accountId === DEFAULT_ACCOUNT_ID ? 
'main' : accountId); + headers['X-OpenClaw-Agent-Id'] = agentId; + if (memoryUser) { + // 使用 Base64 编码处理可能包含中文字符的 memoryUser + // HTTP Header 只能包含 ASCII 字符,中文字符会导致 ByteString 编码错误 + headers['X-OpenClaw-Memory-User'] = Buffer.from(memoryUser, 'utf-8').toString('base64'); + } + + log?.info?.(`[DingTalk][Gateway] POST ${gatewayUrl}, session=${sessionKey}, accountId=${accountId}, agentId=${agentId}, peerKind=${peerKind}, messages=${messages.length}`); + + const response = await fetch(gatewayUrl, { + method: 'POST', + headers, + body: JSON.stringify({ + model: 'main', + messages, + stream: true, + user: sessionKey, // 用于 session 持久化 + }), + }); + + log?.info?.(`[DingTalk][Gateway] 响应 status=${response.status}, ok=${response.ok}, hasBody=${!!response.body}`); + + if (!response.ok || !response.body) { + const errText = response.body ? await response.text() : '(no body)'; + log?.error?.(`[DingTalk][Gateway] 错误响应: ${errText}`); + throw new Error(`Gateway error: ${response.status} - ${errText}`); + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ''; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; + + for (const line of lines) { + if (!line.startsWith('data: ')) continue; + const data = line.slice(6).trim(); + if (data === '[DONE]') return; + + try { + const chunk = JSON.parse(data); + const content = chunk.choices?.[0]?.delta?.content; + if (content) yield content; + } catch {} + } + } +} + +// ============ 图片下载到本地文件 ============ + +/** + * 下载钉钉图片到本地临时文件 + * 返回本地文件路径,用于 OpenClaw AgentMediaPayload + */ +async function downloadImageToFile( + downloadUrl: string, + log?: any, +): Promise { + try { + log?.info?.(`[DingTalk][Image] 开始下载图片: ${downloadUrl.slice(0, 100)}...`); + const resp = await axios.get(downloadUrl, { + responseType: 'arraybuffer', + timeout: 30_000, + }); 
+ + const buffer = Buffer.from(resp.data); + const contentType = resp.headers['content-type'] || 'image/jpeg'; + const ext = contentType.includes('png') ? '.png' : contentType.includes('gif') ? '.gif' : contentType.includes('webp') ? '.webp' : '.jpg'; + const mediaDir = path.join(os.homedir(), '.openclaw', 'workspace', 'media', 'inbound'); + fs.mkdirSync(mediaDir, { recursive: true }); + const tmpFile = path.join(mediaDir, `openclaw-media-${Date.now()}-${Math.random().toString(36).slice(2, 8)}${ext}`); + fs.writeFileSync(tmpFile, buffer); + + log?.info?.(`[DingTalk][Image] 图片下载成功: size=${buffer.length} bytes, type=${contentType}, path=${tmpFile}`); + return tmpFile; + } catch (err: any) { + log?.error?.(`[DingTalk][Image] 图片下载失败: ${err.message}`); + return null; + } +} + +/** + * 通过钉钉 API 下载媒体文件(需要 access_token) + * 适用于 picture/file 类型的 downloadCode + */ +async function downloadMediaByCode( + downloadCode: string, + config: any, + log?: any, +): Promise { + try { + const token = await getAccessToken(config); + log?.info?.(`[DingTalk][Image] 通过 downloadCode 下载媒体: ${downloadCode.slice(0, 30)}...`); + + const resp = await axios.post( + `${DINGTALK_API}/v1.0/robot/messageFiles/download`, + { downloadCode, robotCode: config.clientId }, + { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 30_000, + }, + ); + + const downloadUrl = resp.data?.downloadUrl; + if (!downloadUrl) { + log?.warn?.(`[DingTalk][Image] downloadCode 换取 downloadUrl 失败: ${JSON.stringify(resp.data)}`); + return null; + } + + return downloadImageToFile(downloadUrl, log); + } catch (err: any) { + log?.error?.(`[DingTalk][Image] downloadCode 下载失败: ${err.message}`); + return null; + } +} + +/** + * 通过钉钉 API 下载文件附件(需要 access_token) + * 与 downloadMediaByCode 不同,此函数保留原始文件名 + */ +async function downloadFileByCode( + downloadCode: string, + fileName: string, + config: any, + log?: any, +): Promise { + try { + const token = await getAccessToken(config); + 
log?.info?.(`[DingTalk][File] 通过 downloadCode 下载文件: ${fileName}`); + + const resp = await axios.post( + `${DINGTALK_API}/v1.0/robot/messageFiles/download`, + { downloadCode, robotCode: config.clientId }, + { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 30_000, + }, + ); + + const downloadUrl = resp.data?.downloadUrl; + if (!downloadUrl) { + log?.warn?.(`[DingTalk][File] downloadCode 换取 downloadUrl 失败: ${JSON.stringify(resp.data)}`); + return null; + } + + // 下载文件内容 + const fileResp = await axios.get(downloadUrl, { + responseType: 'arraybuffer', + timeout: 60_000, + }); + + const buffer = Buffer.from(fileResp.data); + const mediaDir = path.join(os.homedir(), '.openclaw', 'workspace', 'media', 'inbound'); + fs.mkdirSync(mediaDir, { recursive: true }); + + // 用时间戳前缀避免文件名冲突,保留原始文件名 + const safeFileName = fileName.replace(/[/\\:*?"<>|]/g, '_'); + const localPath = path.join(mediaDir, `${Date.now()}-${safeFileName}`); + fs.writeFileSync(localPath, buffer); + + log?.info?.(`[DingTalk][File] 文件下载成功: size=${buffer.length} bytes, path=${localPath}`); + return localPath; + } catch (err: any) { + log?.error?.(`[DingTalk][File] 文件下载失败: ${err.message}`); + return null; + } +} + +/** 可直接读取内容的文本类文件扩展名 */ +const TEXT_FILE_EXTENSIONS = new Set(['.txt', '.md', '.csv', '.json', '.xml', '.yaml', '.yml', '.html', '.htm', '.log', '.conf', '.ini', '.sh', '.py', '.js', '.ts', '.css', '.sql']); + +/** 需要保存但无法直接读取的 Office/二进制文件扩展名 */ +const OFFICE_FILE_EXTENSIONS = new Set(['.docx', '.xlsx', '.pptx', '.pdf', '.doc', '.xls', '.ppt', '.zip', '.rar', '.7z']); + +// ============ 消息处理 ============ + +/** 消息内容提取结果 */ +interface ExtractedMessage { + text: string; + messageType: string; + /** 图片 URL 列表(来自 richText 或 picture 消息) */ + imageUrls: string[]; + /** 图片 downloadCode 列表(用于通过 API 下载) */ + downloadCodes: string[]; + /** 文件名列表(与 downloadCodes 对应,用于文件类型消息) */ + fileNames: string[]; + /** at的钉钉用户ID列表 */ + atDingtalkIds: string[]; + /** 
at的手机号列表 */ + atMobiles: string[]; +} + +function extractMessageContent(data: any): ExtractedMessage { + const msgtype = data.msgtype || 'text'; + switch (msgtype) { + case 'text': { + const atDingtalkIds = data.text?.at?.atDingtalkIds || []; + const atMobiles = data.text?.at?.atMobiles || []; + return { + text: data.text?.content?.trim() || '', + messageType: 'text', + imageUrls: [], + downloadCodes: [], + fileNames: [], + atDingtalkIds, + atMobiles + }; + } + case 'richText': { + const parts = data.content?.richText || []; + const textParts: string[] = []; + const imageUrls: string[] = []; + + for (const part of parts) { + if (part.text) { + textParts.push(part.text); + } + if (part.pictureUrl) { + imageUrls.push(part.pictureUrl); + } + if (part.type === 'picture' && part.downloadCode) { + // 有些 richText 图片通过 downloadCode 获取 + imageUrls.push(`downloadCode:${part.downloadCode}`); + } + } + + const text = textParts.join('') || (imageUrls.length > 0 ? '[图片]' : '[富文本消息]'); + return { text, messageType: 'richText', imageUrls, downloadCodes: [], fileNames: [], atDingtalkIds: [], atMobiles: [] }; + } + case 'picture': { + const downloadCode = data.content?.downloadCode || ''; + const pictureUrl = data.content?.pictureUrl || ''; + const imageUrls: string[] = []; + const downloadCodes: string[] = []; + + if (pictureUrl) { + imageUrls.push(pictureUrl); + } + if (downloadCode) { + downloadCodes.push(downloadCode); + } + + return { text: '[图片]', messageType: 'picture', imageUrls, downloadCodes, fileNames: [], atDingtalkIds: [], atMobiles: [] }; + } + case 'audio': + return { text: data.content?.recognition || '[语音消息]', messageType: 'audio', imageUrls: [], downloadCodes: [], fileNames: [], atDingtalkIds: [], atMobiles: [] }; + case 'video': + return { text: '[视频]', messageType: 'video', imageUrls: [], downloadCodes: [], fileNames: [], atDingtalkIds: [], atMobiles: [] }; + case 'file': { + const fileName = data.content?.fileName || '文件'; + const downloadCode = 
data.content?.downloadCode || ''; + const downloadCodes: string[] = []; + const fileNames: string[] = []; + if (downloadCode) { + downloadCodes.push(downloadCode); + fileNames.push(fileName); + } + return { text: `[文件: ${fileName}]`, messageType: 'file', imageUrls: [], downloadCodes, fileNames, atDingtalkIds: [], atMobiles: [] }; + } + default: + return { text: data.text?.content?.trim() || `[${msgtype}消息]`, messageType: msgtype, imageUrls: [], downloadCodes: [], fileNames: [], atDingtalkIds: [], atMobiles: [] }; + } +} + +// 发送 Markdown 消息 +async function sendMarkdownMessage( + config: any, + sessionWebhook: string, + title: string, + markdown: string, + options: any = {}, +): Promise { + const token = await getAccessToken(config); + let text = markdown; + if (options.atUserId) text = `${text} @${options.atUserId}`; + + const body: any = { + msgtype: 'markdown', + markdown: { title: title || 'Moltbot', text }, + }; + if (options.atUserId) body.at = { atUserIds: [options.atUserId], isAtAll: false }; + + return (await axios.post(sessionWebhook, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + })).data; +} + +// 发送文本消息 +async function sendTextMessage( + config: any, + sessionWebhook: string, + text: string, + options: any = {}, +): Promise { + const token = await getAccessToken(config); + const body: any = { msgtype: 'text', text: { content: text } }; + if (options.atUserId) body.at = { atUserIds: [options.atUserId], isAtAll: false }; + + return (await axios.post(sessionWebhook, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + })).data; +} + +// 智能选择 text / markdown +async function sendMessage( + config: any, + sessionWebhook: string, + text: string, + options: any = {}, +): Promise { + const hasMarkdown = /^[#*>-]|[*_`#\[\]]/.test(text) || text.includes('\n'); + const useMarkdown = options.useMarkdown !== false && (options.useMarkdown || hasMarkdown); + + if 
(useMarkdown) { + const title = options.title + || text.split('\n')[0].replace(/^[#*\s\->]+/, '').slice(0, 20) + || 'Moltbot'; + return sendMarkdownMessage(config, sessionWebhook, title, text, options); + } + return sendTextMessage(config, sessionWebhook, text, options); +} + +// ============ 主动发送消息 API ============ + +/** 消息类型枚举 */ +type DingTalkMsgType = 'text' | 'markdown' | 'link' | 'actionCard' | 'image'; + +/** 主动发送消息的结果 */ +interface SendResult { + ok: boolean; + processQueryKey?: string; + cardInstanceId?: string; // AI Card 成功时返回 + error?: string; + usedAICard?: boolean; // 是否使用了 AI Card +} + +/** 主动发送选项 */ +interface ProactiveSendOptions { + msgType?: DingTalkMsgType; + title?: string; + log?: any; + useAICard?: boolean; // 是否使用 AI Card,默认 true + fallbackToNormal?: boolean; // AI Card 失败时是否降级到普通消息,默认 true +} + +/** AI Card 投放目标类型 */ +type AICardTarget = + | { type: 'user'; userId: string } + | { type: 'group'; openConversationId: string }; + +/** + * 构建卡片投放请求体(提取公共逻辑) + */ +function buildDeliverBody( + cardInstanceId: string, + target: AICardTarget, + robotCode: string, +): any { + const base = { outTrackId: cardInstanceId, userIdType: 1 }; + + if (target.type === 'group') { + return { + ...base, + openSpaceId: `dtv1.card//IM_GROUP.${target.openConversationId}`, + imGroupOpenDeliverModel: { robotCode }, + }; + } + + return { + ...base, + openSpaceId: `dtv1.card//IM_ROBOT.${target.userId}`, + imRobotOpenDeliverModel: { spaceType: 'IM_ROBOT', robotCode }, + }; +} + +/** + * 通用 AI Card 创建函数 + * 支持被动回复和主动发送两种场景 + */ +async function createAICardForTarget( + config: any, + target: AICardTarget, + log?: any, +): Promise { + const targetDesc = target.type === 'group' + ? 
`群聊 ${target.openConversationId}` + : `用户 ${target.userId}`; + + try { + const token = await getAccessToken(config); + const cardInstanceId = `card_${Date.now()}_${Math.random().toString(36).slice(2, 10)}`; + + log?.info?.(`[DingTalk][AICard] 开始创建卡片: ${targetDesc}, outTrackId=${cardInstanceId}`); + + // 1. 创建卡片实例 + const createBody = { + cardTemplateId: AI_CARD_TEMPLATE_ID, + outTrackId: cardInstanceId, + cardData: { cardParamMap: {} }, + callbackType: 'STREAM', + imGroupOpenSpaceModel: { supportForward: true }, + imRobotOpenSpaceModel: { supportForward: true }, + }; + + log?.info?.(`[DingTalk][AICard] POST /v1.0/card/instances`); + const createResp = await axios.post(`${DINGTALK_API}/v1.0/card/instances`, createBody, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + }); + log?.info?.(`[DingTalk][AICard] 创建卡片响应: status=${createResp.status}`); + + // 2. 投放卡片 + const deliverBody = buildDeliverBody(cardInstanceId, target, config.clientId); + + log?.info?.(`[DingTalk][AICard] POST /v1.0/card/instances/deliver body=${JSON.stringify(deliverBody)}`); + const deliverResp = await axios.post(`${DINGTALK_API}/v1.0/card/instances/deliver`, deliverBody, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + }); + log?.info?.(`[DingTalk][AICard] 投放卡片响应: status=${deliverResp.status}`); + + return { cardInstanceId, accessToken: token, inputingStarted: false }; + } catch (err: any) { + log?.error?.(`[DingTalk][AICard] 创建卡片失败 (${targetDesc}): ${err.message}`); + if (err.response) { + log?.error?.(`[DingTalk][AICard] 错误响应: status=${err.response.status} data=${JSON.stringify(err.response.data)}`); + } + return null; + } +} + +/** + * 主动发送文件消息(使用普通消息 API) + */ +async function sendFileProactive( + config: any, + target: AICardTarget, + fileInfo: FileInfo, + mediaId: string, + log?: any, +): Promise { + try { + const token = await getAccessToken(config); + + // 钉钉普通消息 API 的文件消息格式 + const msgParam = { + 
mediaId: mediaId, + fileName: fileInfo.fileName, + fileType: fileInfo.fileType, + }; + + const body: any = { + robotCode: config.clientId, + msgKey: 'sampleFile', + msgParam: JSON.stringify(msgParam), + }; + + let endpoint: string; + if (target.type === 'group') { + body.openConversationId = target.openConversationId; + endpoint = `${DINGTALK_API}/v1.0/robot/groupMessages/send`; + } else { + body.userIds = [target.userId]; + endpoint = `${DINGTALK_API}/v1.0/robot/oToMessages/batchSend`; + } + + log?.info?.(`[DingTalk][File][Proactive] 发送文件消息: ${fileInfo.fileName}`); + const resp = await axios.post(endpoint, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 10_000, + }); + + if (resp.data?.processQueryKey) { + log?.info?.(`[DingTalk][File][Proactive] 文件消息发送成功: ${fileInfo.fileName}`); + } else { + log?.warn?.(`[DingTalk][File][Proactive] 文件消息发送响应异常: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][File][Proactive] 发送文件消息失败: ${fileInfo.fileName}, 错误: ${err.message}`); + } +} + +/** + * 主动发送音频消息(使用普通消息 API) + */ +async function sendAudioProactive( + config: any, + target: AICardTarget, + fileInfo: FileInfo, + mediaId: string, + log?: any, + durationMs?: number, +): Promise { + try { + const token = await getAccessToken(config); + + // 钉钉普通消息 API 的音频消息格式 + const actualDuration = (durationMs && durationMs > 0) ? 
durationMs.toString() : '60000'; + const msgParam = { + mediaId: mediaId, + duration: actualDuration, + }; + + const body: any = { + robotCode: config.clientId, + msgKey: 'sampleAudio', + msgParam: JSON.stringify(msgParam), + }; + + let endpoint: string; + if (target.type === 'group') { + body.openConversationId = target.openConversationId; + endpoint = `${DINGTALK_API}/v1.0/robot/groupMessages/send`; + } else { + body.userIds = [target.userId]; + endpoint = `${DINGTALK_API}/v1.0/robot/oToMessages/batchSend`; + } + + log?.info?.(`[DingTalk][Audio][Proactive] 发送音频消息: ${fileInfo.fileName}`); + const resp = await axios.post(endpoint, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 10_000, + }); + + if (resp.data?.processQueryKey) { + log?.info?.(`[DingTalk][Audio][Proactive] 音频消息发送成功: ${fileInfo.fileName}`); + } else { + log?.warn?.(`[DingTalk][Audio][Proactive] 音频消息发送响应异常: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][Audio][Proactive] 发送音频消息失败: ${fileInfo.fileName}, 错误: ${err.message}`); + } +} + +/** + * 主动发送视频消息(使用普通消息 API) + */ +async function sendVideoProactive( + config: any, + target: AICardTarget, + videoMediaId: string, + picMediaId: string, + metadata: VideoMetadata, + log?: any, +): Promise { + try { + const token = await getAccessToken(config); + + // 钉钉普通消息 API 的视频消息格式 + const msgParam = { + duration: metadata.duration.toString(), + videoMediaId: videoMediaId, + videoType: 'mp4', + picMediaId: picMediaId, + }; + + const body: any = { + robotCode: config.clientId, + msgKey: 'sampleVideo', + msgParam: JSON.stringify(msgParam), + }; + + let endpoint: string; + if (target.type === 'group') { + body.openConversationId = target.openConversationId; + endpoint = `${DINGTALK_API}/v1.0/robot/groupMessages/send`; + } else { + body.userIds = [target.userId]; + endpoint = `${DINGTALK_API}/v1.0/robot/oToMessages/batchSend`; + } + + 
log?.info?.(`[DingTalk][Video][Proactive] 发送视频消息`); + const resp = await axios.post(endpoint, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 10_000, + }); + + if (resp.data?.processQueryKey) { + log?.info?.(`[DingTalk][Video][Proactive] 视频消息发送成功`); + } else { + log?.warn?.(`[DingTalk][Video][Proactive] 视频消息发送响应异常: ${JSON.stringify(resp.data)}`); + } + } catch (err: any) { + log?.error?.(`[DingTalk][Video][Proactive] 发送视频消息失败: ${err.message}`); + } +} + +/** 音频信息接口 */ +interface AudioInfo { + path: string; +} + +/** + * 提取音频标记并发送音频消息 + * 解析 [DINGTALK_AUDIO]{"path":"..."}[/DINGTALK_AUDIO] 标记 + * + * @param useProactiveApi 是否使用主动消息 API(用于 AI Card 场景) + * @param target 主动 API 需要的目标信息(useProactiveApi=true 时必须提供) + */ +async function processAudioMarkers( + content: string, + sessionWebhook: string, + config: any, + oapiToken: string | null, + log?: any, + useProactiveApi: boolean = false, + target?: AICardTarget, +): Promise { + const logPrefix = useProactiveApi ? 
'[DingTalk][Audio][Proactive]' : '[DingTalk][Audio]'; + + if (!oapiToken) { + log?.warn?.(`${logPrefix} 无 oapiToken,跳过音频处理`); + return content; + } + + const fs = await import('fs'); + const path = await import('path'); + + const matches = [...content.matchAll(AUDIO_MARKER_PATTERN)]; + const audioInfos: AudioInfo[] = []; + const invalidAudios: string[] = []; + + for (const match of matches) { + try { + const audioInfo = JSON.parse(match[1]) as AudioInfo; + if (audioInfo.path && fs.existsSync(audioInfo.path)) { + audioInfos.push(audioInfo); + log?.info?.(`${logPrefix} 提取到音频: ${audioInfo.path}`); + } else { + invalidAudios.push(audioInfo.path || '未知路径'); + log?.warn?.(`${logPrefix} 音频文件不存在: ${audioInfo.path}`); + } + } catch (err: any) { + log?.warn?.(`${logPrefix} 解析标记失败: ${err.message}`); + } + } + + if (audioInfos.length === 0 && invalidAudios.length === 0) { + log?.info?.(`${logPrefix} 未检测到音频标记`); + return content.replace(AUDIO_MARKER_PATTERN, '').trim(); + } + + // 先移除所有音频标记 + let cleanedContent = content.replace(AUDIO_MARKER_PATTERN, '').trim(); + + const statusMessages: string[] = []; + + for (const invalidPath of invalidAudios) { + statusMessages.push(`⚠️ 音频文件不存在: ${path.basename(invalidPath)}`); + } + + if (audioInfos.length > 0) { + log?.info?.(`${logPrefix} 检测到 ${audioInfos.length} 个音频,开始处理...`); + } + + for (const audioInfo of audioInfos) { + const fileName = path.basename(audioInfo.path); + try { + const ext = path.extname(audioInfo.path).slice(1).toLowerCase(); + + const fileInfo: FileInfo = { + path: audioInfo.path, + fileName: fileName, + fileType: ext, + }; + + // 上传音频到钉钉 + const mediaId = await uploadMediaToDingTalk(audioInfo.path, 'voice', oapiToken, 20 * 1024 * 1024, log); + if (!mediaId) { + statusMessages.push(`⚠️ 音频上传失败: ${fileName}(文件可能超过 20MB 限制)`); + continue; + } + + // 提取音频实际时长 + const audioDurationMs = await extractAudioDuration(audioInfo.path, log); + + // 发送音频消息 + if (useProactiveApi && target) { + await sendAudioProactive(config, 
target, fileInfo, mediaId, log, audioDurationMs ?? undefined); + } else { + await sendAudioMessage(config, sessionWebhook, fileInfo, mediaId, oapiToken, log, audioDurationMs ?? undefined); + } + statusMessages.push(`✅ 音频已发送: ${fileName}`); + log?.info?.(`${logPrefix} 音频处理完成: ${fileName}`); + } catch (err: any) { + log?.error?.(`${logPrefix} 处理音频失败: ${err.message}`); + statusMessages.push(`⚠️ 音频处理异常: ${fileName}(${err.message})`); + } + } + + if (statusMessages.length > 0) { + const statusText = statusMessages.join('\n'); + cleanedContent = cleanedContent + ? `${cleanedContent}\n\n${statusText}` + : statusText; + } + + return cleanedContent; +} + +/** + * 主动创建并发送 AI Card(通用内部实现) + * 复用 createAICardForTarget 并完整支持后处理 + * @param config 钉钉配置 + * @param target 投放目标(单聊或群聊) + * @param content 消息内容 + * @param log 日志对象 + * @returns SendResult + */ +async function sendAICardInternal( + config: any, + target: AICardTarget, + content: string, + log?: any, +): Promise { + const targetDesc = target.type === 'group' + ? `群聊 ${target.openConversationId}` + : `用户 ${target.userId}`; + + try { + // 0. 获取 oapiToken 用于后处理 + const oapiToken = await getOapiAccessToken(config); + + // 1. 后处理01:上传本地图片到钉钉,替换路径为 media_id + let processedContent = content; + if (oapiToken) { + log?.info?.(`[DingTalk][AICard][Proactive] 开始图片后处理`); + processedContent = await processLocalImages(content, oapiToken, log); + } else { + log?.warn?.(`[DingTalk][AICard][Proactive] 无法获取 oapiToken,跳过媒体后处理`); + } + + // 2. 后处理02:提取视频标记并发送视频消息 + log?.info?.(`[DingTalk][Video][Proactive] 开始视频后处理`); + processedContent = await processVideoMarkers(processedContent, '', config, oapiToken, log, true, target); + + // 3. 后处理03:提取音频标记并发送音频消息(使用主动消息 API) + log?.info?.(`[DingTalk][Audio][Proactive] 开始音频后处理`); + processedContent = await processAudioMarkers(processedContent, '', config, oapiToken, log, true, target); + + // 4. 
后处理04:提取文件标记并发送独立文件消息(使用主动消息 API) + log?.info?.(`[DingTalk][File][Proactive] 开始文件后处理`); + processedContent = await processFileMarkers(processedContent, '', config, oapiToken, log, true, target); + + // 5. 检查处理后的内容是否为空(纯文件/视频/音频消息场景) + // 如果内容只包含文件/视频/音频标记,处理后会变成空字符串,此时跳过创建空白 AI Card + const trimmedContent = processedContent.trim(); + if (!trimmedContent) { + log?.info?.(`[DingTalk][AICard][Proactive] 处理后内容为空(纯文件/视频消息),跳过创建 AI Card`); + return { ok: true, usedAICard: false }; + } + + // 5. 创建卡片(复用通用函数) + const card = await createAICardForTarget(config, target, log); + if (!card) { + return { ok: false, error: 'Failed to create AI Card', usedAICard: false }; + } + + // 6. 使用 finishAICard 设置内容 + await finishAICard(card, processedContent, log); + + log?.info?.(`[DingTalk][AICard][Proactive] AI Card 发送成功: ${targetDesc}, cardInstanceId=${card.cardInstanceId}`); + return { ok: true, cardInstanceId: card.cardInstanceId, usedAICard: true }; + + } catch (err: any) { + log?.error?.(`[DingTalk][AICard][Proactive] AI Card 发送失败 (${targetDesc}): ${err.message}`); + if (err.response) { + log?.error?.(`[DingTalk][AICard][Proactive] 错误响应: status=${err.response.status} data=${JSON.stringify(err.response.data)}`); + } + return { ok: false, error: err.response?.data?.message || err.message, usedAICard: false }; + } +} + +/** + * 主动发送 AI Card 到单聊用户 + */ +async function sendAICardToUser( + config: any, + userId: string, + content: string, + log?: any, +): Promise { + return sendAICardInternal(config, { type: 'user', userId }, content, log); +} + +/** + * 主动发送 AI Card 到群聊 + */ +async function sendAICardToGroup( + config: any, + openConversationId: string, + content: string, + log?: any, +): Promise { + return sendAICardInternal(config, { type: 'group', openConversationId }, content, log); +} + +/** + * 构建普通消息的 msgKey 和 msgParam + * 提取公共逻辑,供 sendNormalToUser 和 sendNormalToGroup 复用 + */ +function buildMsgPayload( + msgType: DingTalkMsgType, + content: string, + title?: string, +): { msgKey: 
string; msgParam: Record } | { error: string } { + switch (msgType) { + case 'markdown': + return { + msgKey: 'sampleMarkdown', + msgParam: { + title: title || content.split('\n')[0].replace(/^[#*\s\->]+/, '').slice(0, 20) || 'Message', + text: content, + }, + }; + case 'link': + try { + return { + msgKey: 'sampleLink', + msgParam: typeof content === 'string' ? JSON.parse(content) : content, + }; + } catch { + return { error: 'Invalid link message format, expected JSON' }; + } + case 'actionCard': + try { + return { + msgKey: 'sampleActionCard', + msgParam: typeof content === 'string' ? JSON.parse(content) : content, + }; + } catch { + return { error: 'Invalid actionCard message format, expected JSON' }; + } + case 'image': + return { + msgKey: 'sampleImageMsg', + msgParam: { photoURL: content }, + }; + case 'text': + default: + return { + msgKey: 'sampleText', + msgParam: { content }, + }; + } +} + +/** + * 使用普通消息 API 发送单聊消息(降级方案) + */ +async function sendNormalToUser( + config: any, + userIds: string | string[], + content: string, + options: { msgType?: DingTalkMsgType; title?: string; log?: any } = {}, +): Promise { + const { msgType = 'text', title, log } = options; + const userIdArray = Array.isArray(userIds) ? 
userIds : [userIds]; + + // 构建消息参数 + const payload = buildMsgPayload(msgType, content, title); + if ('error' in payload) { + return { ok: false, error: payload.error, usedAICard: false }; + } + + try { + const token = await getAccessToken(config); + const body = { + robotCode: config.clientId, + userIds: userIdArray, + msgKey: payload.msgKey, + msgParam: JSON.stringify(payload.msgParam), + }; + + log?.info?.(`[DingTalk][Normal] 发送单聊消息: userIds=${userIdArray.join(',')}, msgType=${msgType}`); + + const resp = await axios.post(`${DINGTALK_API}/v1.0/robot/oToMessages/batchSend`, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 10_000, + }); + + if (resp.data?.processQueryKey) { + log?.info?.(`[DingTalk][Normal] 发送成功: processQueryKey=${resp.data.processQueryKey}`); + return { ok: true, processQueryKey: resp.data.processQueryKey, usedAICard: false }; + } + + log?.warn?.(`[DingTalk][Normal] 发送响应异常: ${JSON.stringify(resp.data)}`); + return { ok: false, error: resp.data?.message || 'Unknown error', usedAICard: false }; + } catch (err: any) { + const errMsg = err.response?.data?.message || err.message; + log?.error?.(`[DingTalk][Normal] 发送失败: ${errMsg}`); + return { ok: false, error: errMsg, usedAICard: false }; + } +} + +/** + * 使用普通消息 API 发送群聊消息(降级方案) + */ +async function sendNormalToGroup( + config: any, + openConversationId: string, + content: string, + options: { msgType?: DingTalkMsgType; title?: string; log?: any } = {}, +): Promise { + const { msgType = 'text', title, log } = options; + + // 构建消息参数 + const payload = buildMsgPayload(msgType, content, title); + if ('error' in payload) { + return { ok: false, error: payload.error, usedAICard: false }; + } + + try { + const token = await getAccessToken(config); + const body = { + robotCode: config.clientId, + openConversationId, + msgKey: payload.msgKey, + msgParam: JSON.stringify(payload.msgParam), + }; + + log?.info?.(`[DingTalk][Normal] 发送群聊消息: 
openConversationId=${openConversationId}, msgType=${msgType}`); + + const resp = await axios.post(`${DINGTALK_API}/v1.0/robot/groupMessages/send`, body, { + headers: { 'x-acs-dingtalk-access-token': token, 'Content-Type': 'application/json' }, + timeout: 10_000, + }); + + if (resp.data?.processQueryKey) { + log?.info?.(`[DingTalk][Normal] 发送成功: processQueryKey=${resp.data.processQueryKey}`); + return { ok: true, processQueryKey: resp.data.processQueryKey, usedAICard: false }; + } + + log?.warn?.(`[DingTalk][Normal] 发送响应异常: ${JSON.stringify(resp.data)}`); + return { ok: false, error: resp.data?.message || 'Unknown error', usedAICard: false }; + } catch (err: any) { + const errMsg = err.response?.data?.message || err.message; + log?.error?.(`[DingTalk][Normal] 发送失败: ${errMsg}`); + return { ok: false, error: errMsg, usedAICard: false }; + } +} + +/** + * 主动发送单聊消息给指定用户 + * 默认使用 AI Card,失败时降级到普通消息 + * @param config 钉钉配置(需包含 clientId 和 clientSecret) + * @param userIds 用户 ID 数组(staffId 或 unionId) + * @param content 消息内容 + * @param options 可选配置 + */ +async function sendToUser( + config: any, + userIds: string | string[], + content: string, + options: ProactiveSendOptions = {}, +): Promise { + const { log, useAICard = true, fallbackToNormal = true } = options; + + if (!config.clientId || !config.clientSecret) { + return { ok: false, error: 'Missing clientId or clientSecret', usedAICard: false }; + } + + const userIdArray = Array.isArray(userIds) ? 
userIds : [userIds]; + if (userIdArray.length === 0) { + return { ok: false, error: 'userIds cannot be empty', usedAICard: false }; + } + + // AI Card 只支持单个用户 + if (useAICard && userIdArray.length === 1) { + log?.info?.(`[DingTalk][SendToUser] 尝试使用 AI Card 发送: userId=${userIdArray[0]}`); + const cardResult = await sendAICardToUser(config, userIdArray[0], content, log); + + if (cardResult.ok) { + return cardResult; + } + + // AI Card 失败 + log?.warn?.(`[DingTalk][SendToUser] AI Card 发送失败: ${cardResult.error}`); + + if (!fallbackToNormal) { + log?.error?.(`[DingTalk][SendToUser] 不降级到普通消息,返回错误`); + return cardResult; + } + + log?.info?.(`[DingTalk][SendToUser] 降级到普通消息发送`); + } else if (useAICard && userIdArray.length > 1) { + log?.info?.(`[DingTalk][SendToUser] 多用户发送不支持 AI Card,使用普通消息`); + } + + // 使用普通消息 + return sendNormalToUser(config, userIdArray, content, options); +} + +/** + * 主动发送群聊消息到指定群 + * 默认使用 AI Card,失败时降级到普通消息 + * @param config 钉钉配置(需包含 clientId 和 clientSecret) + * @param openConversationId 群会话 ID + * @param content 消息内容 + * @param options 可选配置 + */ +async function sendToGroup( + config: any, + openConversationId: string, + content: string, + options: ProactiveSendOptions = {}, +): Promise { + const { log, useAICard = true, fallbackToNormal = true } = options; + + if (!config.clientId || !config.clientSecret) { + return { ok: false, error: 'Missing clientId or clientSecret', usedAICard: false }; + } + + if (!openConversationId) { + return { ok: false, error: 'openConversationId cannot be empty', usedAICard: false }; + } + + // 尝试使用 AI Card + if (useAICard) { + log?.info?.(`[DingTalk][SendToGroup] 尝试使用 AI Card 发送: openConversationId=${openConversationId}`); + const cardResult = await sendAICardToGroup(config, openConversationId, content, log); + + if (cardResult.ok) { + return cardResult; + } + + // AI Card 失败 + log?.warn?.(`[DingTalk][SendToGroup] AI Card 发送失败: ${cardResult.error}`); + + if (!fallbackToNormal) { + log?.error?.(`[DingTalk][SendToGroup] 
不降级到普通消息,返回错误`); + return cardResult; + } + + log?.info?.(`[DingTalk][SendToGroup] 降级到普通消息发送`); + } + + // 使用普通消息 + return sendNormalToGroup(config, openConversationId, content, options); +} + +/** + * 智能发送消息 + * 默认使用 AI Card,失败时降级到普通消息 + * @param config 钉钉配置 + * @param target 目标:{ userId } 或 { openConversationId } + * @param content 消息内容 + * @param options 可选配置 + */ +async function sendProactive( + config: any, + target: { userId?: string; userIds?: string[]; openConversationId?: string }, + content: string, + options: ProactiveSendOptions = {}, +): Promise { + // 自动检测是否使用 markdown(用于降级时) + if (!options.msgType) { + const hasMarkdown = /^[#*>-]|[*_`#\[\]]/.test(content) || content.includes('\n'); + if (hasMarkdown) { + options.msgType = 'markdown'; + } + } + + // 发送到用户 + if (target.userId || target.userIds) { + const userIds = target.userIds || [target.userId!]; + return sendToUser(config, userIds, content, options); + } + + // 发送到群 + if (target.openConversationId) { + return sendToGroup(config, target.openConversationId, content, options); + } + + return { ok: false, error: 'Must specify userId, userIds, or openConversationId', usedAICard: false }; +} + +// ============ 核心消息处理 (AI Card Streaming) ============ + +async function handleDingTalkMessage(params: { + cfg: ClawdbotConfig; + accountId: string; + data: any; + sessionWebhook: string; + log?: any; + dingtalkConfig: any; +}): Promise { + const { cfg, accountId, data, sessionWebhook, log, dingtalkConfig } = params; + + const content = extractMessageContent(data); + if (!content.text && content.imageUrls.length === 0 && content.downloadCodes.length === 0) return; + + const isDirect = data.conversationType === '1'; + const senderId = data.senderStaffId || data.senderId; + const senderName = data.senderNick || 'Unknown'; + + log?.info?.(`[DingTalk] 收到消息: from=${senderName} type=${content.messageType} text="${content.text.slice(0, 50)}..." 
images=${content.imageUrls.length} downloadCodes=${content.downloadCodes.length}`);

  // ===== DM policy check =====
  // Direct messages may be restricted to an allowlist of sender ids.
  if (isDirect) {
    const dmPolicy = dingtalkConfig.dmPolicy || 'open';
    const allowFrom: string[] = dingtalkConfig.allowFrom || [];
    if (dmPolicy === 'allowlist' && allowFrom.length > 0 && !allowFrom.includes(senderId)) {
      log?.warn?.(`[DingTalk] DM 被拦截: senderId=${senderId} 不在 allowFrom 白名单中`);
      return;
    }
  }

  // ===== Session management =====
  const forceNewSession = isNewSessionCommand(content.text);

  // A "new session" command is acknowledged immediately and short-circuits
  // the normal message flow.
  if (forceNewSession) {
    await sendMessage(dingtalkConfig, sessionWebhook, '✨ 已开启新会话,之前的对话已清空。', {
      atUserId: !isDirect ? senderId : null,
    });
    log?.info?.(`[DingTalk] 用户请求新会话: ${senderId}`);
    return;
  }

  // Build the standard OpenClaw session context.
  // Legacy-config compatibility: sessionTimeout is deprecated — warn only.
  if (dingtalkConfig.sessionTimeout !== undefined) {
    log?.warn?.(`[DingTalk][Deprecation] 'sessionTimeout' 配置已废弃,会话超时由 OpenClaw Gateway 的 session.reset 配置控制`);
  }
  const separateSessionByConversation = dingtalkConfig.separateSessionByConversation as boolean | undefined;
  const groupSessionScope = dingtalkConfig.groupSessionScope as 'group' | 'group_sender' | undefined;
  const sessionContext = buildSessionContext({
    accountId,
    senderId,
    senderName,
    conversationType: data.conversationType,
    conversationId: data.conversationId,
    groupSubject: data.conversationTitle,
    separateSessionByConversation,
    groupSessionScope,
  });
  const sessionContextJson = JSON.stringify(sessionContext);
  log?.info?.(`[DingTalk][Session] context=${sessionContextJson}`);

  // memoryUser lets the Gateway attribute long-term memory to an owner.
  // peerId (ASCII-only) is used as the identifier to avoid HTTP header
  // encoding problems with non-ASCII names.
  const memoryUser = dingtalkConfig.sharedMemoryAcrossConversations === true
    ? accountId
    : `${sessionContext.channel}:${sessionContext.accountId}:${sessionContext.peerId}`;

  // Gateway auth: token takes precedence over password.
  const gatewayAuth = dingtalkConfig.gatewayToken || dingtalkConfig.gatewayPassword || '';

  // Build system prompts & fetch the oapi token (used for post-processing
  // image/file uploads back to DingTalk).
  const systemPrompts: string[] = [];
  let oapiToken: string | null = null;

  if (dingtalkConfig.enableMediaUpload !== false) {
    // Prompt tells the LLM to emit local paths / file markers directly.
    systemPrompts.push(buildMediaSystemPrompt());
    // Token is needed later to upload generated media.
    oapiToken = await getOapiAccessToken(dingtalkConfig);
    log?.info?.(`[DingTalk][Media] oapiToken 获取${oapiToken ? '成功' : '失败'}`);
  } else {
    log?.info?.(`[DingTalk][Media] enableMediaUpload=false,跳过`);
  }

  // Custom system prompt from config, appended after the media prompt.
  if (dingtalkConfig.systemPrompt) {
    systemPrompts.push(dingtalkConfig.systemPrompt);
  }

  // ===== Download incoming images to local files (for the OpenClaw
  // AgentMediaPayload) =====
  const imageLocalPaths: string[] = [];

  // Direct image URLs (pictureUrl entries from richText messages).
  for (const url of content.imageUrls) {
    if (url.startsWith('downloadCode:')) {
      // Fetch via downloadCode.
      const code = url.slice('downloadCode:'.length);
      const localPath = await downloadMediaByCode(code, dingtalkConfig, log);
      if (localPath) imageLocalPaths.push(localPath);
    } else {
      // Plain URL download.
      const localPath = await downloadImageToFile(url, log);
      if (localPath) imageLocalPaths.push(localPath);
    }
  }

  // downloadCodes from 'picture' messages — entries without a fileName are images.
  for (let i = 0; i < content.downloadCodes.length; i++) {
    const code = content.downloadCodes[i];
    const fileName = content.fileNames[i]; // a fileName means a file attachment; none means an image
    if (!fileName) {
      const localPath = await downloadMediaByCode(code, dingtalkConfig, log);
      if (localPath) imageLocalPaths.push(localPath);
    }
  }

  if (imageLocalPaths.length > 0) {
    log?.info?.(`[DingTalk][Image] 成功下载 ${imageLocalPaths.length} 张图片到本地`);
  }

  // ===== File attachment download and content extraction =====
  const fileContentParts: string[] = [];
  for (let i = 0; i < content.downloadCodes.length; i++) {
    const code = content.downloadCodes[i];
    const fileName = content.fileNames[i];
    if (!fileName) continue; // images were handled above

    const ext = path.extname(fileName).toLowerCase();
    const localPath = await downloadFileByCode(code, fileName, dingtalkConfig, log);

    if (!localPath) {
      fileContentParts.push(`[文件下载失败: ${fileName}]`);
      continue;
    }

    if (TEXT_FILE_EXTENSIONS.has(ext)) {
      // Text-like files: inline the content into the user message.
      try {
        const fileContent = fs.readFileSync(localPath, 'utf-8');
        const maxLen = 50_000; // cap on how much file content is inlined
        const truncated = fileContent.length > maxLen ? fileContent.slice(0, maxLen) + '\n...(内容过长,已截断)' : fileContent;
        fileContentParts.push(`[文件: ${fileName}]\n\`\`\`\n${truncated}\n\`\`\``);
        log?.info?.(`[DingTalk][File] 文本文件已读取: ${fileName}, size=${fileContent.length}`);
      } catch (err: any) {
        log?.error?.(`[DingTalk][File] 读取文本文件失败: ${err.message}`);
        fileContentParts.push(`[文件已保存: ${localPath},但读取内容失败]`);
      }
    } else if (ext === '.docx') {
      // Word documents: extract plain text with mammoth (lazy import).
      try {
        const mammoth = await import('mammoth');
        const result = await mammoth.default.extractRawText({ path: localPath });
        const fileContent = result.value;
        const maxLen = 50_000;
        const truncated = fileContent.length > maxLen ? fileContent.slice(0, maxLen) + '\n...(内容过长,已截断)' : fileContent;
        fileContentParts.push(`[文件: ${fileName}]\n\`\`\`\n${truncated}\n\`\`\``);
        log?.info?.(`[DingTalk][File] Word 文档已提取文本: ${fileName}, size=${fileContent.length}`);
      } catch (err: any) {
        log?.error?.(`[DingTalk][File] Word 文档文本提取失败: ${err.message}`);
        fileContentParts.push(`[文件已保存: ${localPath},但提取文本失败]`);
      }
    } else if (ext === '.pdf') {
      // PDF documents: extract plain text with pdf-parse (lazy import).
      try {
        const pdfParse = (await import('pdf-parse')).default;
        const dataBuffer = fs.readFileSync(localPath);
        const pdfData = await pdfParse(dataBuffer);
        const fileContent = pdfData.text;
        const maxLen = 50_000;
        const truncated = fileContent.length > maxLen ? fileContent.slice(0, maxLen) + '\n...(内容过长,已截断)' : fileContent;
        fileContentParts.push(`[文件: ${fileName}]\n\`\`\`\n${truncated}\n\`\`\``);
        log?.info?.(`[DingTalk][File] PDF 文档已提取文本: ${fileName}, size=${fileContent.length}`);
      } catch (err: any) {
        log?.error?.(`[DingTalk][File] PDF 文档文本提取失败: ${err.message}`);
        fileContentParts.push(`[文件已保存: ${localPath},但提取文本失败]`);
      }
    } else {
      // Other office/binary files: keep on disk and pass the path as a hint.
      fileContentParts.push(`[文件已保存: ${localPath},请基于文件名和上下文回答]`);
      log?.info?.(`[DingTalk][File] 文件已保存: ${fileName} -> ${localPath}`);
    }
  }

  // Image-only messages (no text) get a default prompt.
  let userContent = content.text || (imageLocalPaths.length > 0 ? '请描述这张图片' : '');
  // Append extracted file content after the user's text.
  if (fileContentParts.length > 0) {
    const fileText = fileContentParts.join('\n\n');
    userContent = userContent ? `${userContent}\n\n${fileText}` : fileText;
  }
  if (!userContent && imageLocalPaths.length === 0) return;

  // ===== Async mode: immediate ack + background execution + proactive push =====
  const asyncMode = dingtalkConfig.asyncMode === true;
  const proactiveTarget = isDirect
    ? { userId: data.senderStaffId || data.senderId }
    : { openConversationId: data.conversationId };

  if (asyncMode) {
    const ackText = dingtalkConfig.ackText || '🫡 任务已接收,处理中...';
    try {
      await sendProactive(dingtalkConfig, proactiveTarget, ackText, {
        msgType: 'text',
        useAICard: false,
        fallbackToNormal: true,
        log,
      });
    } catch (ackErr: any) {
      // Ack failure is non-fatal; the task still runs.
      log?.warn?.(`[DingTalk][Async] 回执发送失败: ${ackErr?.message || ackErr}`);
    }

    // peerKind / peerId are used by the Gateway for bindings matching.
    const peerKind: 'direct' | 'group' = isDirect ? 'direct' : 'group';
    const peerId = senderId;

    let fullResponse = '';
    try {
      for await (const chunk of streamFromGateway({
        userContent,
        systemPrompts,
        sessionKey: sessionContextJson,
        gatewayAuth,
        memoryUser,
        imageLocalPaths: imageLocalPaths.length > 0 ? imageLocalPaths : undefined,
        peerKind,
        peerId,
        gatewayPort: cfg.gateway?.port,
        log,
      }, accountId)) {
        fullResponse += chunk;
      }

      log?.info?.(`[DingTalk][Async] Gateway 完成,原始长度=${fullResponse.length}`);

      // Post-process 01: upload local images to DingTalk, replacing
      // file:// paths with media_ids.
      fullResponse = await processLocalImages(fullResponse, oapiToken, log);

      // Post-process 02: extract video markers and send them as separate
      // video messages via the proactive API.
      const proactiveMediaTarget: AICardTarget = isDirect
        ? { type: 'user', userId: data.senderStaffId || data.senderId }
        : { type: 'group', openConversationId: data.conversationId };
      fullResponse = await processVideoMarkers(fullResponse, '', dingtalkConfig, oapiToken, log, true, proactiveMediaTarget);

      // Post-process 03: extract audio markers (proactive API).
      fullResponse = await processAudioMarkers(fullResponse, '', dingtalkConfig, oapiToken, log, true, proactiveMediaTarget);

      // Post-process 04: extract file markers and send standalone file
      // messages (proactive API).
      fullResponse = await processFileMarkers(fullResponse, '', dingtalkConfig, oapiToken, log, true, proactiveMediaTarget);

      const finalText = fullResponse.trim() || '✅ 任务执行完成(无文本输出)';
      await sendProactive(dingtalkConfig, proactiveTarget, finalText, {
        msgType: 'markdown',
        useAICard: false,
        fallbackToNormal: true,
        log,
      });

      log?.info?.(`[DingTalk][Async] 结果已主动推送,长度=${finalText.length}`);
    } catch (err: any) {
      const errMsg = `⚠️ 任务执行失败: ${err?.message || err}`;
      log?.error?.(`[DingTalk][Async] ${errMsg}`);
      try {
        await sendProactive(dingtalkConfig, proactiveTarget, errMsg, {
          msgType: 'text',
          useAICard: false,
          fallbackToNormal: true,
          log,
        });
      } catch (sendErr: any) {
        log?.error?.(`[DingTalk][Async] 错误通知发送失败: ${sendErr?.message || sendErr}`);
      }
    }

    return;
  }

  // peerKind / peerId for bindings matching (declared outside asyncMode so
  // both the AI Card and fallback branches below can use them).
  const peerKind: 'direct' | 'group' = isDirect ? 'direct' : 'group';
  const peerId = senderId;

  // Try to create an AI Card for streaming output.
  const card = await createAICard(dingtalkConfig, data, log);

  if (card) {
    // ===== AI Card streaming mode =====
    log?.info?.(`[DingTalk] AI Card 创建成功: ${card.cardInstanceId}`);

    let accumulated = '';
    let lastUpdateTime = 0;
    const updateInterval = 300; // minimum interval between card updates, ms
    let chunkCount = 0;

    try {
      log?.info?.(`[DingTalk] 开始请求 Gateway 流式接口...`);
      for await (const chunk of streamFromGateway({
        userContent,
        systemPrompts,
        sessionKey: sessionContextJson,
        gatewayAuth,
        memoryUser,
        imageLocalPaths: imageLocalPaths.length > 0 ? imageLocalPaths : undefined,
        peerKind,
        peerId,
        gatewayPort: cfg.gateway?.port,
        log,
      }, accountId)) {
        accumulated += chunk;
        chunkCount++;

        if (chunkCount <= 3) {
          log?.info?.(`[DingTalk] Gateway chunk #${chunkCount}: "${chunk.slice(0, 50)}..." (accumulated=${accumulated.length})`);
        }

        // Throttle card updates to avoid flooding the card API.
        const now = Date.now();
        if (now - lastUpdateTime >= updateInterval) {
          // Strip file/video/audio markers live so users never see raw
          // markers while streaming.
          const displayContent = accumulated
            .replace(FILE_MARKER_PATTERN, '')
            .replace(VIDEO_MARKER_PATTERN, '')
            .replace(AUDIO_MARKER_PATTERN, '')
            .trim();
          await streamAICard(card, displayContent, false, log);
          lastUpdateTime = now;
        }
      }

      log?.info?.(`[DingTalk] Gateway 流完成,共 ${chunkCount} chunks, ${accumulated.length} 字符`);

      // Post-process 01: upload local images, replacing file:// paths with
      // media_ids.
      log?.info?.(`[DingTalk][Media] 开始图片后处理,内容片段="${accumulated.slice(0, 200)}..."`);
      accumulated = await processLocalImages(accumulated, oapiToken, log);

      // Key fix: in the AI Card flow, files/videos are sent through the
      // proactive message API to avoid sessionWebhook expiry issues.
      // NOTE: this const shadows the function-scope proactiveTarget above;
      // this one carries the AICardTarget shape the media helpers expect.
      const proactiveTarget: AICardTarget = isDirect
        ? { type: 'user', userId: data.senderStaffId || data.senderId }
        : { type: 'group', openConversationId: data.conversationId };

      // Post-process 02: video markers (proactive API).
      log?.info?.(`[DingTalk][Video] 开始视频后处理 (使用主动API)`);
      accumulated = await processVideoMarkers(accumulated, '', dingtalkConfig, oapiToken, log, true, proactiveTarget);

      // Post-process 03: audio markers (proactive API).
      log?.info?.(`[DingTalk][Audio] 开始音频后处理 (使用主动API)`);
      accumulated = await processAudioMarkers(accumulated, '', dingtalkConfig, oapiToken, log, true, proactiveTarget);

      // Post-process 04: file markers (proactive API).
      log?.info?.(`[DingTalk][File] 开始文件后处理 (使用主动API,目标=${JSON.stringify(proactiveTarget)})`);
      accumulated = await processFileMarkers(accumulated, sessionWebhook, dingtalkConfig, oapiToken, log, true, proactiveTarget);

      // Finish the AI Card (empty content means a media-only reply — use a
      // default placeholder instead of a blank card).
      const finalContent = accumulated.trim();
      if (finalContent.length === 0) {
        log?.info?.(`[DingTalk][AICard] 内容为空(纯媒体消息),使用默认提示`);
        await finishAICard(card, '✅ 媒体已发送', log);
      } else {
        await finishAICard(card, finalContent, log);
      }
      log?.info?.(`[DingTalk] 流式响应完成,共 ${finalContent.length} 字符`);

    } catch (err: any) {
      log?.error?.(`[DingTalk] Gateway 调用失败: ${err.message}`);
      log?.error?.(`[DingTalk] 错误详情: ${err.stack}`);
      accumulated += `\n\n⚠️ 响应中断: ${err.message}`;
      try {
        // Best effort: finish the card with whatever was streamed so far.
        await finishAICard(card, accumulated, log);
      } catch (finishErr: any) {
        log?.error?.(`[DingTalk] 错误恢复 finish 也失败: ${finishErr.message}`);
      }
    }

  } else {
    // ===== Fallback: plain (non-streaming) message mode =====
    log?.warn?.(`[DingTalk] AI Card 创建失败,降级为普通消息`);

    let fullResponse = '';
    try {
      for await (const chunk of streamFromGateway({
        userContent,
        systemPrompts,
        sessionKey: sessionContextJson,
        gatewayAuth,
        memoryUser,
        imageLocalPaths: imageLocalPaths.length > 0 ? imageLocalPaths : undefined,
        peerKind,
        peerId,
        gatewayPort: cfg.gateway?.port,
        log,
      }, accountId)) {
        fullResponse += chunk;
      }

      // Post-process 01: upload local images, replacing file:// paths with
      // media_ids.
      log?.info?.(`[DingTalk][Media] (降级模式) 开始图片后处理,内容片段="${fullResponse.slice(0, 200)}..."`);
      fullResponse = await processLocalImages(fullResponse, oapiToken, log);

      // Post-process 02: video markers (via sessionWebhook here, not the
      // proactive API, since there is no AI Card in this branch).
      log?.info?.(`[DingTalk][Video] (降级模式) 开始视频后处理`);
      fullResponse = await processVideoMarkers(fullResponse, sessionWebhook, dingtalkConfig, oapiToken, log);

      // Post-process 03: audio markers.
      log?.info?.(`[DingTalk][Audio] (降级模式) 开始音频后处理`);
      fullResponse = await processAudioMarkers(fullResponse, sessionWebhook, dingtalkConfig, oapiToken, log);

      // Post-process 04: file markers.
      log?.info?.(`[DingTalk][File] (降级模式) 开始文件后处理`);
      fullResponse = await processFileMarkers(fullResponse, sessionWebhook, dingtalkConfig, oapiToken, log);

      await sendMessage(dingtalkConfig, sessionWebhook, fullResponse || '(无响应)', {
        atUserId: !isDirect ? senderId : null,
        useMarkdown: true,
      });
      log?.info?.(`[DingTalk] 普通消息回复完成,共 ${fullResponse.length} 字符`);

    } catch (err: any) {
      log?.error?.(`[DingTalk] Gateway 调用失败: ${err.message}`);
      await sendMessage(dingtalkConfig, sessionWebhook, `抱歉,处理请求时出错: ${err.message}`, {
        atUserId: !isDirect ?
senderId : null,
      });
    }
  }
}

// ============ DingTalk Docs API ============

/** Document metadata. */
interface DocInfo {
  docId: string;
  title: string;
  docType: string;
  creatorId?: string;
  updatedAt?: string;
}

/** A content block within a document (blocks may nest via children). */
interface DocBlock {
  blockId: string;
  blockType: string;
  text?: string;
  children?: DocBlock[];
}

/**
 * DingTalk Docs client.
 * Reads and writes DingTalk online documents (docs, sheets, etc.) through
 * the open-platform doc/wiki REST endpoints.
 */
class DingtalkDocsClient {
  private config: any;
  private log?: any;

  constructor(config: any, log?: any) {
    this.config = config;
    this.log = log;
  }

  /** Build request headers carrying the access token. */
  private async getHeaders(): Promise> {
    const token = await getAccessToken(this.config);
    return {
      'x-acs-dingtalk-access-token': token,
      'Content-Type': 'application/json',
    };
  }

  /**
   * Fetch document metadata.
   * Returns null (after logging) on any API failure.
   *
   * @param spaceId Space id.
   * @param docId   Document id.
   */
  async getDocInfo(spaceId: string, docId: string): Promise {
    try {
      const headers = await this.getHeaders();
      this.log?.info?.(`[DingTalk][Docs] 获取文档信息: spaceId=${spaceId}, docId=${docId}`);

      const resp = await axios.get(
        `${DINGTALK_API}/v1.0/doc/spaces/${spaceId}/docs/${docId}`,
        { headers, timeout: 10_000 },
      );

      const data = resp.data;
      this.log?.info?.(`[DingTalk][Docs] 文档信息获取成功: title=${data?.title}`);

      return {
        docId: data.docId || docId,
        title: data.title || '',
        docType: data.docType || 'unknown',
        creatorId: data.creatorId,
        updatedAt: data.updatedAt,
      };
    } catch (err: any) {
      this.log?.error?.(`[DingTalk][Docs] 获取文档信息失败: ${err.message}`);
      return null;
    }
  }

  /**
   * Read a wiki node via the v2.0/wiki node API.
   * NOTE: despite the name, this returns a formatted metadata summary of the
   * node (name/category/url/workspace), not the document body text.
   * Returns null when operatorId is missing or the API call fails.
   *
   * @param nodeId     Wiki node id.
   * @param operatorId Operator unionId (required by the API).
   */
  async readDoc(nodeId: string, operatorId?: string): Promise {
    try {
      const headers = await this.getHeaders();
      this.log?.info?.(`[DingTalk][Docs] 读取知识库节点: nodeId=${nodeId}, operatorId=${operatorId}`);

      if (!operatorId) {
        this.log?.error?.('[DingTalk][Docs] readDoc 需要 operatorId(unionId)');
        return null;
      }

      const resp = await axios.get(
        `${DINGTALK_API}/v2.0/wiki/nodes/${nodeId}`,
        { headers, params: { operatorId }, timeout: 15_000 },
      );

      const node = resp.data?.node || resp.data;
      const name = node.name || '未知文档';
      const category = node.category || 'unknown';
      const url = node.url || '';
      const workspaceId = node.workspaceId || '';

      const content = [
        `文档名: ${name}`,
        `类型: ${category}`,
        `URL: ${url}`,
        `工作区: ${workspaceId}`,
      ].join('\n');

      this.log?.info?.(`[DingTalk][Docs] 节点信息获取成功: name=${name}, category=${category}`);
      return content;
    } catch (err: any) {
      this.log?.error?.(`[DingTalk][Docs] 读取节点失败: ${err.message}`);
      if (err.response) {
        this.log?.error?.(`[DingTalk][Docs] 错误详情: status=${err.response.status} data=${JSON.stringify(err.response.data)}`);
      }
      return null;
    }
  }

  /**
   * Recursively collect the plain text of a block tree (depth-first,
   * parent text before children).
   */
  private extractTextFromBlocks(blocks: DocBlock[]): string[] {
    const result: string[] = [];
    for (const block of blocks) {
      if (block.text) {
        result.push(block.text);
      }
      if (block.children && block.children.length > 0) {
        result.push(...this.extractTextFromBlocks(block.children));
      }
    }
    return result;
  }

  /**
   * Append a paragraph of text to a document.
   * Returns true on success, false (after logging) on failure.
   *
   * @param docId   Document id.
   * @param content Text to append.
   * @param index   Insert position (-1 = end of document).
   */
  async appendToDoc(
    docId: string,
    content: string,
    index: number = -1,
  ): Promise {
    try {
      const headers = await this.getHeaders();
      this.log?.info?.(`[DingTalk][Docs] 向文档追加内容: docId=${docId}, contentLen=${content.length}`);

      const body = {
        blockType: 'PARAGRAPH',
        body: {
          text: content,
        },
        index,
      };

      await axios.post(
        `${DINGTALK_API}/v1.0/doc/documents/${docId}/blocks/root/children`,
        body,
        { headers, timeout: 10_000 },
      );

      this.log?.info?.(`[DingTalk][Docs] 内容追加成功`);
      return true;
    } catch (err: any) {
      this.log?.error?.(`[DingTalk][Docs] 追加内容失败: ${err.message}`);
      if (err.response) {
        this.log?.error?.(`[DingTalk][Docs] 错误详情: status=${err.response.status} data=${JSON.stringify(err.response.data)}`);
      }
      return false;
    }
  }

  /**
   * Create a new document (docType 'alidoc') in a space's root directory,
   * optionally seeding it with initial content via appendToDoc.
   * Returns the new document's info, or null on failure.
   *
   * @param spaceId Space id.
   * @param title   Document title.
   * @param content Optional initial content.
   */
  async createDoc(
    spaceId: string,
    title: string,
    content?: string,
  ): Promise {
    try {
      const headers = await this.getHeaders();
      this.log?.info?.(`[DingTalk][Docs] 创建文档: spaceId=${spaceId}, title=${title}`);

      const body: any = {
        spaceId,
        parentDentryId: '',
        name: title,
        docType: 'alidoc',
      };

      const resp = await axios.post(
        `${DINGTALK_API}/v1.0/doc/spaces/${spaceId}/docs`,
        body,
        { headers, timeout: 10_000 },
      );

      const data = resp.data;
      this.log?.info?.(`[DingTalk][Docs] 文档创建成功: docId=${data?.docId}`);

      const docInfo: DocInfo = {
        docId: data.docId || data.dentryUuid || '',
        title: title,
        docType: data.docType || 'alidoc',
      };

      // Seed the document with initial content when provided.
      if (content && docInfo.docId) {
        await this.appendToDoc(docInfo.docId, content);
      }

      return docInfo;
    } catch (err: any) {
      this.log?.error?.(`[DingTalk][Docs] 创建文档失败: ${err.message}`);
      if (err.response) {
        this.log?.error?.(`[DingTalk][Docs] 错误详情: status=${err.response.status} data=${JSON.stringify(err.response.data)}`);
      }
      return null;
    }
  }

  /**
   * Search documents by keyword (up to 20 results).
   * Returns an empty array (after logging) on failure.
   *
   * @param keyword Search keyword.
   * @param spaceId Optional space id; omitted = search all spaces.
   */
  async searchDocs(
    keyword: string,
    spaceId?: string,
  ): Promise {
    try {
      const headers = await this.getHeaders();
      this.log?.info?.(`[DingTalk][Docs] 搜索文档: keyword=${keyword}, spaceId=${spaceId || '全部'}`);

      const body: any = { keyword, maxResults: 20 };
      if (spaceId) body.spaceId = spaceId;

      const resp = await axios.post(
        `${DINGTALK_API}/v1.0/doc/docs/search`,
        body,
        { headers, timeout: 10_000 },
      );

      const items = resp.data?.items || [];
      const docs: DocInfo[] = items.map((item: any) => ({
        docId: item.docId || item.dentryUuid || '',
        title: item.name || item.title || '',
        docType: item.docType || 'unknown',
        creatorId:
item.creatorId, + updatedAt: item.updatedAt, + })); + + this.log?.info?.(`[DingTalk][Docs] 搜索到 ${docs.length} 个文档`); + return docs; + } catch (err: any) { + this.log?.error?.(`[DingTalk][Docs] 搜索文档失败: ${err.message}`); + return []; + } + } + + /** + * 列出空间下的文档 + * @param spaceId 空间 ID + * @param parentId 父目录 ID(可选,不填则列出根目录) + */ + async listDocs( + spaceId: string, + parentId?: string, + ): Promise { + try { + const headers = await this.getHeaders(); + this.log?.info?.(`[DingTalk][Docs] 列出文档: spaceId=${spaceId}, parentId=${parentId || '根目录'}`); + + const params: any = { maxResults: 50 }; + if (parentId) params.parentDentryId = parentId; + + const resp = await axios.get( + `${DINGTALK_API}/v1.0/doc/spaces/${spaceId}/dentries`, + { headers, params, timeout: 10_000 }, + ); + + const items = resp.data?.items || []; + const docs: DocInfo[] = items.map((item: any) => ({ + docId: item.dentryUuid || item.docId || '', + title: item.name || '', + docType: item.docType || item.dentryType || 'unknown', + creatorId: item.creatorId, + updatedAt: item.updatedAt, + })); + + this.log?.info?.(`[DingTalk][Docs] 列出 ${docs.length} 个文档/目录`); + return docs; + } catch (err: any) { + this.log?.error?.(`[DingTalk][Docs] 列出文档失败: ${err.message}`); + return []; + } + } +} + +// ============ 插件定义 ============ + +const meta = { + id: 'dingtalk-connector', + label: 'DingTalk', + selectionLabel: 'DingTalk (钉钉)', + docsPath: '/channels/dingtalk-connector', + docsLabel: 'dingtalk-connector', + blurb: '钉钉企业内部机器人,使用 Stream 模式,无需公网 IP,支持 AI Card 流式响应。', + order: 70, + aliases: ['dd', 'ding'], +}; + +const dingtalkPlugin = { + id: 'dingtalk-connector', + meta, + capabilities: { + chatTypes: ['direct', 'group'], + reactions: false, + threads: false, + media: true, + nativeCommands: false, + blockStreaming: false, + }, + reload: { configPrefixes: ['channels.dingtalk-connector'] }, + configSchema: { + schema: { + type: 'object', + additionalProperties: false, + properties: { + enabled: { type: 'boolean', 
default: true }, + clientId: { type: 'string', description: 'DingTalk App Key (Client ID)' }, + clientSecret: { type: 'string', description: 'DingTalk App Secret (Client Secret)' }, + enableMediaUpload: { type: 'boolean', default: true, description: 'Enable media upload prompt injection' }, + systemPrompt: { type: 'string', default: '', description: 'Custom system prompt' }, + dmPolicy: { type: 'string', enum: ['open', 'pairing', 'allowlist'], default: 'open' }, + allowFrom: { type: 'array', items: { type: 'string' }, description: 'Allowed sender IDs' }, + groupPolicy: { type: 'string', enum: ['open', 'allowlist'], default: 'open' }, + gatewayToken: { type: 'string', default: '', description: 'Gateway auth token (Bearer)' }, + gatewayPassword: { type: 'string', default: '', description: 'Gateway auth password (alternative to token)' }, + sessionTimeout: { type: 'number', default: 1800000, description: 'Session timeout in ms (default 30min)' }, + separateSessionByConversation: { type: 'boolean', default: true, description: '是否按单聊/群聊/群区分 session' }, + sharedMemoryAcrossConversations: { type: 'boolean', default: false, description: '单 agent 场景下是否共享记忆;false 时不同群聊、群聊与私聊记忆隔离' }, + asyncMode: { type: 'boolean', default: false, description: 'Send immediate ack and push final result as a second message' }, + ackText: { type: 'string', default: '🫡 任务已接收,处理中...', description: 'Ack text when asyncMode is enabled' }, + debug: { type: 'boolean', default: false }, + }, + required: ['clientId', 'clientSecret'], + }, + uiHints: { + enabled: { label: 'Enable DingTalk' }, + clientId: { label: 'App Key', sensitive: false }, + clientSecret: { label: 'App Secret', sensitive: true }, + dmPolicy: { label: 'DM Policy' }, + groupPolicy: { label: 'Group Policy' }, + }, + }, + config: { + listAccountIds: (cfg: ClawdbotConfig) => { + const config = getConfig(cfg); + // __default__ 是内部标记,表示使用顶层配置(单账号模式) + return config.accounts + ? Object.keys(config.accounts) + : (isConfigured(cfg) ? 
['__default__'] : []); + }, + resolveAccount: (cfg: ClawdbotConfig, accountId?: string) => { + const config = getConfig(cfg); + const id = accountId || DEFAULT_ACCOUNT_ID; + if (config.accounts?.[id]) { + return { accountId: id, config: config.accounts[id], enabled: config.accounts[id].enabled !== false }; + } + // 没有 accounts 配置或找不到指定账号时,使用顶层配置 + return { accountId: DEFAULT_ACCOUNT_ID, config, enabled: config.enabled !== false }; + }, + defaultAccountId: () => '__default__', + isConfigured: (account: any) => Boolean(account.config?.clientId && account.config?.clientSecret), + describeAccount: (account: any) => ({ + accountId: account.accountId, + name: account.config?.name || 'DingTalk', + enabled: account.enabled, + configured: Boolean(account.config?.clientId), + }), + }, + security: { + resolveDmPolicy: ({ account }: any) => ({ + policy: account.config?.dmPolicy || 'open', + allowFrom: account.config?.allowFrom || [], + policyPath: 'channels.dingtalk-connector.dmPolicy', + allowFromPath: 'channels.dingtalk-connector.allowFrom', + approveHint: '使用 /allow dingtalk-connector: 批准用户', + normalizeEntry: (raw: string) => raw.replace(/^(dingtalk-connector|dingtalk|dd|ding):/i, ''), + }), + }, + groups: { + resolveRequireMention: ({ cfg }: any) => getConfig(cfg).groupPolicy !== 'open', + }, + messaging: { + // 注意:normalizeTarget 接收字符串,返回字符串(保持大小写,因为 openConversationId 是 base64 编码) + normalizeTarget: (raw: string) => { + if (!raw) return undefined; + // 去掉渠道前缀,但保持原始大小写 + return raw.trim().replace(/^(dingtalk-connector|dingtalk|dd|ding):/i, ''); + }, + targetResolver: { + // 支持普通 ID、Base64 编码的 conversationId,以及 user:/group: 前缀格式 + looksLikeId: (id: string) => /^(user:|group:)?[\w+/=-]+$/.test(id), + hint: 'user: 或 group:', + }, + }, + outbound: { + deliveryMode: 'direct' as const, + textChunkLimit: 4000, + /** + * 主动发送文本消息 + * @param ctx.to 目标格式:user: 或 group: + * @param ctx.text 消息内容 + * @param ctx.accountId 账号 ID + */ + sendText: async (ctx: any) => { + const { cfg, to, 
text, accountId, log } = ctx; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + const config = account?.config; + + if (!config?.clientId || !config?.clientSecret) { + throw new Error('DingTalk not configured'); + } + + if (!to) { + throw new Error('Target is required. Format: user: or group:'); + } + + // 解析目标:user: 或 group: + const targetStr = String(to); + let result: SendResult; + + log?.info?.(`[DingTalk][outbound.sendText] 解析目标: targetStr="${targetStr}"`); + + if (targetStr.startsWith('user:')) { + const userId = targetStr.slice(5); + log?.info?.(`[DingTalk][outbound.sendText] 发送给用户: userId="${userId}"`); + result = await sendToUser(config, userId, text, { log }); + } else if (targetStr.startsWith('group:')) { + const openConversationId = targetStr.slice(6); + log?.info?.(`[DingTalk][outbound.sendText] 发送到群: openConversationId="${openConversationId}"`); + result = await sendToGroup(config, openConversationId, text, { log }); + } else { + // 默认当作 userId 处理 + log?.info?.(`[DingTalk][outbound.sendText] 默认发送给用户: userId="${targetStr}"`); + result = await sendToUser(config, targetStr, text, { log }); + } + + if (result.ok) { + return { channel: 'dingtalk-connector', messageId: result.processQueryKey || 'unknown' }; + } + throw new Error(result.error || 'Failed to send message'); + }, + /** + * 主动发送媒体消息(图片) + * @param ctx.to 目标格式:user: 或 group: + * @param ctx.text 消息文本/标题 + * @param ctx.mediaUrl 媒体 URL(钉钉仅支持图片 URL) + * @param ctx.accountId 账号 ID + */ + sendMedia: async (ctx: any) => { + const { cfg, to, text, mediaUrl, accountId, log } = ctx; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + const config = account?.config; + + if (!config?.clientId || !config?.clientSecret) { + throw new Error('DingTalk not configured'); + } + + if (!to) { + throw new Error('Target is required. 
Format: user: or group:'); + } + + // 解析目标 + const targetStr = String(to); + let result: SendResult; + + // 如果有媒体 URL,发送图片消息 + if (mediaUrl) { + if (targetStr.startsWith('user:')) { + const userId = targetStr.slice(5); + result = await sendToUser(config, userId, mediaUrl, { msgType: 'image', log }); + } else if (targetStr.startsWith('group:')) { + const openConversationId = targetStr.slice(6); + result = await sendToGroup(config, openConversationId, mediaUrl, { msgType: 'image', log }); + } else { + result = await sendToUser(config, targetStr, mediaUrl, { msgType: 'image', log }); + } + } else { + // 无媒体,发送文本 + if (targetStr.startsWith('user:')) { + const userId = targetStr.slice(5); + result = await sendToUser(config, userId, text || '', { log }); + } else if (targetStr.startsWith('group:')) { + const openConversationId = targetStr.slice(6); + result = await sendToGroup(config, openConversationId, text || '', { log }); + } else { + result = await sendToUser(config, targetStr, text || '', { log }); + } + } + + if (result.ok) { + return { channel: 'dingtalk-connector', messageId: result.processQueryKey || 'unknown' }; + } + throw new Error(result.error || 'Failed to send media'); + }, + }, + gateway: { + startAccount: async (ctx: any) => { + const { account, cfg, abortSignal } = ctx; + const config = account.config; + + if (!config.clientId || !config.clientSecret) { + throw new Error('DingTalk clientId and clientSecret are required'); + } + + ctx.log?.info(`[${account.accountId}] 启动钉钉 Stream 客户端...`); + + // 启用 DWClient 内置的 autoReconnect 和 keepAlive + // - autoReconnect: 连接断开时自动重连 + // - keepAlive: 启用心跳机制,防止服务端因长时间无活动而断开连接 + const client = new DWClient({ + clientId: config.clientId, + clientSecret: config.clientSecret, + debug: config.debug || false, + autoReconnect: true, + keepAlive: true, + } as any); + + client.registerCallbackListener(TOPIC_ROBOT, async (res: any) => { + const messageId = res.headers?.messageId; + ctx.log?.info?.(`[DingTalk] 收到 Stream 回调, 
messageId=${messageId}, headers=${JSON.stringify(res.headers)}`); + + // 【关键修复】立即确认回调,避免钉钉服务器因超时而重发 + // 钉钉 Stream 模式要求及时响应,否则约60秒后会重发消息 + if (messageId) { + client.socketCallBackResponse(messageId, { success: true }); + ctx.log?.info?.(`[DingTalk] 已立即确认回调: messageId=${messageId}`); + } + + // 【消息去重】检查是否已处理过该消息 + if (messageId && isMessageProcessed(messageId)) { + ctx.log?.warn?.(`[DingTalk] 检测到重复消息,跳过处理: messageId=${messageId}`); + return; + } + + // 标记消息为已处理 + if (messageId) { + markMessageProcessed(messageId); + } + + // 异步处理消息(不阻塞回调确认) + try { + ctx.log?.info?.(`[DingTalk] 原始 data: ${typeof res.data === 'string' ? res.data.slice(0, 500) : JSON.stringify(res.data).slice(0, 500)}`); + const data = JSON.parse(res.data); + + await handleDingTalkMessage({ + cfg, + accountId: account.accountId, + data, + sessionWebhook: data.sessionWebhook, + log: ctx.log, + dingtalkConfig: config, + }); + } catch (error: any) { + ctx.log?.error?.(`[DingTalk] 处理消息异常: ${error.message}`); + // 注意:即使处理失败,也不需要再次响应(已经提前确认了) + } + }); + + await client.connect(); + ctx.log?.info(`[${account.accountId}] 钉钉 Stream 客户端已连接`); + + const rt = getRuntime(); + rt.channel.activity.record('dingtalk-connector', account.accountId, 'start'); + + let stopped = false; + + // 统一的停止逻辑 + const doStop = (reason: string) => { + if (stopped) return; + stopped = true; + ctx.log?.info(`[${account.accountId}] 停止钉钉 Stream 客户端 (${reason})...`); + try { + // 【关键】调用 disconnect() 正确关闭 WebSocket 连接 + client.disconnect(); + } catch (err: any) { + ctx.log?.warn?.(`[${account.accountId}] 断开连接时出错: ${err.message}`); + } + rt.channel.activity.record('dingtalk-connector', account.accountId, 'stop'); + }; + + // 【关键修复】返回一个 Promise 并保持 pending 状态直到 abortSignal 触发 + // 这样框架不会认为账号已退出,避免触发 auto-restart + // 参考:OpenClaw changelog - "keep startAccount pending until abort to prevent restart-loop storms" + return new Promise((resolve) => { + if (abortSignal) { + abortSignal.addEventListener('abort', () => { + doStop('abortSignal'); + 
resolve({ + stop: () => doStop('manual'), + isHealthy: () => !stopped, + }); + }); + } + }); + }, + }, + status: { + defaultRuntime: { accountId: DEFAULT_ACCOUNT_ID, running: false, lastStartAt: null, lastStopAt: null, lastError: null }, + probe: async ({ cfg }: any) => { + if (!isConfigured(cfg)) return { ok: false, error: 'Not configured' }; + try { + const config = getConfig(cfg); + await getAccessToken(config); + return { ok: true, details: { clientId: config.clientId } }; + } catch (error: any) { + return { ok: false, error: error.message }; + } + }, + buildChannelSummary: ({ snapshot }: any) => ({ + configured: snapshot?.configured ?? false, + running: snapshot?.running ?? false, + lastStartAt: snapshot?.lastStartAt ?? null, + lastStopAt: snapshot?.lastStopAt ?? null, + lastError: snapshot?.lastError ?? null, + }), + }, +}; + +// ============ 插件注册 ============ + +const plugin = { + id: 'dingtalk-connector', + name: 'DingTalk Channel', + description: 'DingTalk (钉钉) messaging channel via Stream mode with AI Card streaming', + configSchema: { + type: 'object', + additionalProperties: true, + properties: { enabled: { type: 'boolean', default: true } }, + }, + register(api: ClawdbotPluginApi) { + runtime = api.runtime; + api.registerChannel({ plugin: dingtalkPlugin }); + + // ===== Gateway Methods ===== + + api.registerGatewayMethod('dingtalk-connector.status', async ({ respond, cfg }: any) => { + const result = await dingtalkPlugin.status.probe({ cfg }); + respond(true, result); + }); + + api.registerGatewayMethod('dingtalk-connector.probe', async ({ respond, cfg }: any) => { + const result = await dingtalkPlugin.status.probe({ cfg }); + respond(result.ok, result); + }); + + /** + * 主动发送单聊消息 + * 参数: + * - userId / userIds: 目标用户 ID(支持单个或数组) + * - content: 消息内容 + * - msgType?: 'text' | 'markdown' | 'link' | 'actionCard' | 'image'(降级时使用,默认 text) + * - title?: markdown 消息标题 + * - useAICard?: 是否使用 AI Card(默认 true) + * - fallbackToNormal?: AI Card 失败时是否降级到普通消息(默认 true) 
+ * - accountId?: 使用的账号 ID(可选,不传则使用默认配置) + */ + api.registerGatewayMethod('dingtalk-connector.sendToUser', async ({ respond, cfg, params, log }: any) => { + const { userId, userIds, content, msgType, title, useAICard, fallbackToNormal, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + + const targetUserIds = userIds || (userId ? [userId] : []); + if (targetUserIds.length === 0) { + return respond(false, { error: 'userId or userIds is required' }); + } + + if (!content) { + return respond(false, { error: 'content is required' }); + } + + const result = await sendToUser(account.config, targetUserIds, content, { + msgType, + title, + log, + useAICard: useAICard !== false, // 默认 true + fallbackToNormal: fallbackToNormal !== false, // 默认 true + }); + respond(result.ok, result); + }); + + /** + * 主动发送群聊消息 + * 参数: + * - openConversationId: 群会话 ID + * - content: 消息内容 + * - msgType?: 'text' | 'markdown' | 'link' | 'actionCard' | 'image'(降级时使用,默认 text) + * - title?: markdown 消息标题 + * - useAICard?: 是否使用 AI Card(默认 true) + * - fallbackToNormal?: AI Card 失败时是否降级到普通消息(默认 true) + * - accountId?: 使用的账号 ID(可选,不传则使用默认配置) + */ + api.registerGatewayMethod('dingtalk-connector.sendToGroup', async ({ respond, cfg, params, log }: any) => { + const { openConversationId, content, msgType, title, useAICard, fallbackToNormal, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + + if (!openConversationId) { + return respond(false, { error: 'openConversationId is required' }); + } + + if (!content) { + return respond(false, { error: 'content is required' }); + } + + const result = await sendToGroup(account.config, openConversationId, content, { + msgType, + title, + log, + useAICard: 
useAICard !== false, // 默认 true + fallbackToNormal: fallbackToNormal !== false, + }); + respond(result.ok, result); + }); + + /** + * 智能发送消息(自动检测目标类型和消息格式) + * 参数: + * - target: 目标(user: 或 group:) + * - content: 消息内容 + * - msgType?: 消息类型(降级时使用,可选,不指定则自动检测) + * - title?: 标题(用于 markdown) + * - useAICard?: 是否使用 AI Card(默认 true) + * - fallbackToNormal?: AI Card 失败时是否降级到普通消息(默认 true) + * - accountId?: 账号 ID + */ + api.registerGatewayMethod('dingtalk-connector.send', async ({ respond, cfg, params, log }: any) => { + const { target, content, message, msgType, title, useAICard, fallbackToNormal, accountId } = params || {}; + const actualContent = content || message; // 兼容 message 字段 + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + log?.info?.(`[DingTalk][Send] 收到请求: params=${JSON.stringify(params)}`); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + + if (!target) { + return respond(false, { error: 'target is required (format: user: or group:)' }); + } + + if (!actualContent) { + return respond(false, { error: 'content is required' }); + } + + const targetStr = String(target); + let sendTarget: { userId?: string; openConversationId?: string }; + + if (targetStr.startsWith('user:')) { + sendTarget = { userId: targetStr.slice(5) }; + } else if (targetStr.startsWith('group:')) { + sendTarget = { openConversationId: targetStr.slice(6) }; + } else { + // 默认当作 userId + sendTarget = { userId: targetStr }; + } + + log?.info?.(`[DingTalk][Send] 解析后目标: sendTarget=${JSON.stringify(sendTarget)}`); + + const result = await sendProactive(account.config, sendTarget, actualContent, { + msgType, + title, + log, + useAICard: useAICard !== false, // 默认 true + fallbackToNormal: fallbackToNormal !== false, + }); + respond(result.ok, result); + }); + + // ===== 文档 API Methods ===== + + /** + * 读取钉钉知识库文档节点信息 + * 参数: + * - docId: 知识库节点 ID + * - operatorId: 操作者 unionId 或 staffId(会自动转换为 unionId) + * - accountId?: 账号 
ID + */ + api.registerGatewayMethod('dingtalk-connector.docs.read', async ({ respond, cfg, params, log }: any) => { + const { docId, operatorId: rawOperatorId, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + if (!docId) { + return respond(false, { error: 'docId is required' }); + } + if (!rawOperatorId) { + return respond(false, { error: 'operatorId (unionId or staffId) is required' }); + } + + // 如果 operatorId 不像 unionId(通常以字母数字开头且较长),尝试将 staffId 转为 unionId + let operatorId = rawOperatorId; + if (!rawOperatorId.includes('$')) { + // 可能已经是 unionId,直接使用;否则尝试转换 + const resolved = await getUnionId(rawOperatorId, account.config, log); + if (resolved) operatorId = resolved; + } + + const client = new DingtalkDocsClient(account.config, log); + const content = await client.readDoc(docId, operatorId); + + if (content !== null) { + respond(true, { content }); + } else { + respond(false, { error: 'Failed to read document node' }); + } + }); + + /** + * 创建钉钉文档 + * 参数: + * - spaceId: 空间 ID + * - title: 文档标题 + * - content?: 初始内容 + * - accountId?: 账号 ID + */ + api.registerGatewayMethod('dingtalk-connector.docs.create', async ({ respond, cfg, params, log }: any) => { + const { spaceId, title, content, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + if (!spaceId || !title) { + return respond(false, { error: 'spaceId and title are required' }); + } + + const client = new DingtalkDocsClient(account.config, log); + const doc = await client.createDoc(spaceId, title, content); + + if (doc) { + respond(true, doc); + } else { + respond(false, { error: 'Failed to create document' }); + } + }); + + /** + * 向钉钉文档追加内容 + * 参数: + * - docId: 文档 ID + * - content: 要追加的内容 + * - 
accountId?: 账号 ID + */ + api.registerGatewayMethod('dingtalk-connector.docs.append', async ({ respond, cfg, params, log }: any) => { + const { docId, content, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + if (!docId || !content) { + return respond(false, { error: 'docId and content are required' }); + } + + const client = new DingtalkDocsClient(account.config, log); + const ok = await client.appendToDoc(docId, content); + respond(ok, ok ? { success: true } : { error: 'Failed to append to document' }); + }); + + /** + * 搜索钉钉文档 + * 参数: + * - keyword: 搜索关键词 + * - spaceId?: 空间 ID(可选) + * - accountId?: 账号 ID + */ + api.registerGatewayMethod('dingtalk-connector.docs.search', async ({ respond, cfg, params, log }: any) => { + const { keyword, spaceId, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + if (!keyword) { + return respond(false, { error: 'keyword is required' }); + } + + const client = new DingtalkDocsClient(account.config, log); + const docs = await client.searchDocs(keyword, spaceId); + respond(true, { docs }); + }); + + /** + * 列出空间下的文档 + * 参数: + * - spaceId: 空间 ID + * - parentId?: 父目录 ID(可选) + * - accountId?: 账号 ID + */ + api.registerGatewayMethod('dingtalk-connector.docs.list', async ({ respond, cfg, params, log }: any) => { + const { spaceId, parentId, accountId } = params || {}; + const account = dingtalkPlugin.config.resolveAccount(cfg, accountId); + + if (!account.config?.clientId) { + return respond(false, { error: 'DingTalk not configured' }); + } + if (!spaceId) { + return respond(false, { error: 'spaceId is required' }); + } + + const client = new DingtalkDocsClient(account.config, log); + const docs = await client.listDocs(spaceId, 
parentId); + respond(true, { docs }); + }); + + api.logger?.info('[DingTalk] 插件已注册(支持主动发送 AI Card 消息、文档读写)'); + }, +}; + +export default plugin; +export { + dingtalkPlugin, + // 回复消息(需要 sessionWebhook) + sendMessage, + sendTextMessage, + sendMarkdownMessage, + // 主动发送消息(无需 sessionWebhook) + sendToUser, + sendToGroup, + sendProactive, + // 钉钉文档客户端 + DingtalkDocsClient, +}; diff --git a/.flocks/plugins/channels/dingtalk/dingtalk.py b/.flocks/plugins/channels/dingtalk/dingtalk.py new file mode 100644 index 0000000..09aa59e --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/dingtalk.py @@ -0,0 +1,249 @@ +""" +DingTalk ChannelPlugin for flocks. + +Launches runner.ts (via npm) as a subprocess. runner.ts constructs a minimal +OpenClaw runtime shim that drives plugin.ts's DWClient WebSocket connection +to DingTalk. All AI inference requests are served through flocks's +POST /v1/chat/completions endpoint. + +Location: + .flocks/plugins/channels/dingtalk/dingtalk.py + +Directory layout: + dingtalk/ + ├── dingtalk.py ← this file (auto-loaded by flocks) + ├── runner.ts ← Node.js bridge layer (no modification needed) + └── dingtalk-openclaw-connector/ + └── plugin.ts ← original connector (no modification needed) + +flocks.json configuration example: + { + "channels": { + "dingtalk": { + "enabled": true, + "clientId": "dingXXXXXX", + "clientSecret": "your_secret", + "defaultAgent": "rex" + } + } + } + +Optional extra fields (passed through to plugin.ts): + gatewayToken Bearer auth token (usually not needed; flocks has no local auth) + debug true/false, enables plugin.ts debug logging + separateSessionByConversation true (default) + groupSessionScope "group" (default) / "group_sender" + sharedMemoryAcrossConversations false (default) + dmPolicy "open" (default) / "allowlist" + allowFrom list of allowed senderStaffId values +""" + +from __future__ import annotations + +import asyncio +import os +import subprocess +import sys +from pathlib import Path +from typing import Any, 
Awaitable, Callable, Optional + +from flocks.channel.base import ( + ChannelCapabilities, + ChannelMeta, + ChannelPlugin, + ChatType, + DeliveryResult, + InboundMessage, + OutboundContext, +) +from flocks.utils.log import Log + +log = Log.create(service="channel.dingtalk") + +# Directory containing runner.ts (same level as this file) +_PLUGIN_DIR = Path(__file__).parent +_RUNNER_TS = _PLUGIN_DIR / "runner.ts" +_CONNECTOR_DIR = _PLUGIN_DIR / "dingtalk-openclaw-connector" +_CONNECTOR_PACKAGE = _CONNECTOR_DIR / "package.json" + + +def _find_npm() -> str: + """Return the npm executable path, raising if not found.""" + if npm := os.environ.get("NPM_PATH"): + return npm + + import shutil + + for candidate in ("npm", "npm.cmd"): + if npm := shutil.which(candidate): + return npm + + raise RuntimeError( + "npm not found. Please install Node.js (which includes npm) or set the NPM_PATH environment variable." + ) + + +class DingTalkChannel(ChannelPlugin): + """DingTalk channel — bridges to plugin.ts via a runner.ts subprocess.""" + + def __init__(self) -> None: + super().__init__() + self._proc: Optional[subprocess.Popen] = None + self._monitor_task: Optional[asyncio.Task] = None + + # ── Metadata ────────────────────────────────────────────────────────────── + + def meta(self) -> ChannelMeta: + return ChannelMeta( + id="dingtalk", + label="DingTalk", + aliases=["dingding", "dingtalk-connector"], + order=30, + ) + + def capabilities(self) -> ChannelCapabilities: + return ChannelCapabilities( + chat_types=[ChatType.DIRECT, ChatType.GROUP], + media=True, + threads=False, + reactions=False, + edit=False, + rich_text=True, + ) + + def validate_config(self, config: dict) -> Optional[str]: + for key in ("clientId", "clientSecret"): + if not config.get(key): + return f"Missing required config field: {key}" + if not _RUNNER_TS.exists(): + return f"runner.ts not found: {_RUNNER_TS}" + if not _CONNECTOR_PACKAGE.exists(): + return f"package.json not found: {_CONNECTOR_PACKAGE}" + return 
None + + # ── Lifecycle ───────────────────────────────────────────────────────────── + + async def start( + self, + config: dict, + on_message: Callable[[InboundMessage], Awaitable[None]], + abort_event: Optional[asyncio.Event] = None, + ) -> None: + """Start the runner.ts subprocess and monitor it until abort_event fires.""" + self._config = config + self._on_message = on_message + + npm = _find_npm() + flocks_port = self._get_flocks_port() + + env = { + **os.environ, + "DINGTALK_CLIENT_ID": config.get("clientId", ""), + "DINGTALK_CLIENT_SECRET": config.get("clientSecret", ""), + "FLOCKS_PORT": str(flocks_port), + "FLOCKS_AGENT": config.get("defaultAgent", ""), + "FLOCKS_GATEWAY_TOKEN": config.get("gatewayToken", ""), + "DINGTALK_DEBUG": "true" if config.get("debug") else "false", + "DINGTALK_ACCOUNT_ID": config.get("_account_id", "__default__"), + } + + log.info("dingtalk.start", { + "runner": str(_RUNNER_TS), + "flocks_port": flocks_port, + "client_id": config.get("clientId", ""), + }) + + self._start_process(npm, env) + self.mark_connected() + + # Monitor subprocess until abort_event is set + self._monitor_task = asyncio.create_task( + self._monitor(abort_event) + ) + await self._monitor_task + + async def stop(self) -> None: + if self._monitor_task and not self._monitor_task.done(): + self._monitor_task.cancel() + self._kill_process() + self.mark_disconnected() + + # ── Outbound messages ───────────────────────────────────────────────────── + # plugin.ts replies to DingTalk directly via sessionWebhook; flocks does not + # need to route through send_text. This method is required by the framework + # and is kept as a placeholder for proactive push support. + + async def send_text(self, ctx: OutboundContext) -> DeliveryResult: + """ + Proactively push a text message (for agent-initiated DingTalk messages). + Passive replies from plugin.ts go through sessionWebhook and bypass this path. + Reserved for future extension; currently returns not-supported. 
+ """ + log.warning("dingtalk.send_text.not_implemented", { + "to": ctx.to, + "hint": "Proactive push requires the dingtalk-connector.send GatewayMethod", + }) + return DeliveryResult( + channel_id="dingtalk", + message_id="", + success=False, + error="Proactive push not yet implemented; plugin.ts passive replies go through sessionWebhook", + ) + + # ── Internal methods ────────────────────────────────────────────────────── + + def _get_flocks_port(self) -> int: + """Get the flocks HTTP port from the environment variable or fall back to the default.""" + return int(os.environ.get("FLOCKS_PORT", "8000")) + + def _start_process(self, npm: str, env: dict) -> None: + """Start the runner.ts subprocess.""" + self._proc = subprocess.Popen( + [npm, "run", "start:runner"], + cwd=str(_CONNECTOR_DIR), + env=env, + stdout=sys.stdout, + stderr=sys.stderr, + ) + log.info("dingtalk.process.started", {"pid": self._proc.pid}) + + def _kill_process(self) -> None: + """Terminate the subprocess.""" + if self._proc and self._proc.poll() is None: + log.info("dingtalk.process.terminating", {"pid": self._proc.pid}) + self._proc.terminate() + try: + self._proc.wait(timeout=5) + except subprocess.TimeoutExpired: + self._proc.kill() + self._proc.wait() + log.info("dingtalk.process.stopped", {"pid": self._proc.pid}) + self._proc = None + + async def _monitor(self, abort_event: Optional[asyncio.Event]) -> None: + """Monitor the subprocess; log errors on non-zero exit; stop when abort_event fires.""" + try: + while True: + if abort_event and abort_event.is_set(): + log.info("dingtalk.monitor.abort") + break + + # Non-blocking check whether the process has exited + if self._proc and self._proc.poll() is not None: + rc = self._proc.returncode + if rc != 0: + log.error("dingtalk.process.exited_unexpectedly", {"returncode": rc}) + self.mark_disconnected(f"runner.ts exited unexpectedly, exit code={rc}") + else: + log.info("dingtalk.process.exited_normally", {"returncode": rc}) + break + + await 
asyncio.sleep(2) + except asyncio.CancelledError: + pass + finally: + self._kill_process() + + +# Discovered by flocks PluginLoader via this variable +CHANNELS = [DingTalkChannel()] diff --git a/.flocks/plugins/channels/dingtalk/runner.ts b/.flocks/plugins/channels/dingtalk/runner.ts new file mode 100644 index 0000000..f1d8101 --- /dev/null +++ b/.flocks/plugins/channels/dingtalk/runner.ts @@ -0,0 +1,355 @@ +/** + * runner.ts — flocks DingTalk bridge + * + * Constructs a minimal OpenClaw PluginRuntime/ClawdbotPluginApi shim so that + * plugin.ts can run inside the flocks environment without any modifications. + * + * Key substitution: + * plugin.ts internally calls streamFromGateway(), which posts to + * POST http://127.0.0.1:{port}/v1/chat/completions (SSE) + * We point that port at a lightweight HTTP proxy embedded in this file. + * The proxy translates the OpenAI format into real flocks API calls: + * POST /api/session → create or reuse a session + * POST /api/session/{id}/message → trigger inference + * GET /api/event → SSE, filter message.part.updated.delta + * Results are streamed back to plugin.ts as OpenAI SSE chunks — zero + * modifications to plugin.ts required. 
+ * + * Startup (invoked by dingtalk.py via subprocess): + * DINGTALK_CLIENT_ID=xxx DINGTALK_CLIENT_SECRET=xxx FLOCKS_PORT=8000 bun run runner.ts + */ + +import plugin from './dingtalk-openclaw-connector/plugin.ts'; +import { createServer, type IncomingMessage, type ServerResponse } from 'http'; + +// ── Environment variables ──────────────────────────────────────────────────── +const CLIENT_ID = process.env.DINGTALK_CLIENT_ID || ''; +const CLIENT_SECRET = process.env.DINGTALK_CLIENT_SECRET || ''; +const FLOCKS_PORT = parseInt(process.env.FLOCKS_PORT || '8000', 10); +const FLOCKS_AGENT = process.env.FLOCKS_AGENT || ''; +const GATEWAY_TOKEN = process.env.FLOCKS_GATEWAY_TOKEN || ''; +const DEBUG = process.env.DINGTALK_DEBUG === 'true'; +const ACCOUNT_ID = process.env.DINGTALK_ACCOUNT_ID || '__default__'; + +// Proxy listens on a random port; plugin.ts's streamFromGateway calls land here +const PROXY_HOST = '127.0.0.1'; +let PROXY_PORT = 0; // resolved after startup + +if (!CLIENT_ID || !CLIENT_SECRET) { + console.error('[runner] Missing environment variables DINGTALK_CLIENT_ID / DINGTALK_CLIENT_SECRET'); + process.exit(1); +} + +const FLOCKS_BASE = `http://127.0.0.1:${FLOCKS_PORT}`; + +// ── Session map: session_key → flocks session_id ─────────────────────────── +const sessionMap = new Map(); + +/** + * Parse a sessionKey (possibly a JSON string) into a human-readable session title. 
+ * Format is consistent with Feishu/WeCom: + * DM → [Dingtalk] DM — {senderName} + * Group → [Dingtalk] {chatId} + */ +function buildSessionTitle(sessionKey: string): string { + try { + const info = JSON.parse(sessionKey); + const chatType: string = info.chatType || ''; + const senderName: string = info.senderName || info.peerId || sessionKey; + const chatId: string = info.peerId || info.chatId || sessionKey; + if (chatType === 'direct') { + return `[Dingtalk] DM — ${senderName}`; + } + return `[Dingtalk] ${chatId}`; + } catch { + // sessionKey is not JSON, use it as-is + return `[Dingtalk] ${sessionKey}`; + } +} + +async function getOrCreateSession(sessionKey: string, agentName: string): Promise { + const existing = sessionMap.get(sessionKey); + if (existing) { + // Verify the session still exists + try { + const r = await fetch(`${FLOCKS_BASE}/api/session/${existing}`); + if (r.ok) return existing; + } catch {} + sessionMap.delete(sessionKey); + } + + const body: any = { title: buildSessionTitle(sessionKey) }; + if (agentName) body.agent = agentName; + + const r = await fetch(`${FLOCKS_BASE}/api/session`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }); + if (!r.ok) throw new Error(`Failed to create session: ${r.status} ${await r.text()}`); + + const data: any = await r.json(); + const sessionId: string = data.id; + sessionMap.set(sessionKey, sessionId); + console.log(`[runner] session created: key=${sessionKey} id=${sessionId}`); + return sessionId; +} + +// ── Convert flocks /api/event SSE to OpenAI delta SSE ──────────────────── +async function* flocksToOpenAIStream( + sessionId: string, + userText: string, + agentName: string, + systemPrompts: string[], +): AsyncGenerator { + // 1. 
Open event SSE connection first (before sending the message to avoid missing the first frame) + const eventUrl = `${FLOCKS_BASE}/api/event`; + const eventResp = await fetch(eventUrl, { + headers: { Accept: 'text/event-stream' }, + }); + if (!eventResp.ok || !eventResp.body) { + throw new Error(`Failed to connect to event SSE: ${eventResp.status}`); + } + + // 2. Send the user message to trigger inference + let fullText = userText; + if (systemPrompts.length > 0) { + const sys = systemPrompts.map(s => `\n${s}\n`).join('\n'); + fullText = `${sys}\n\n${userText}`; + } + + const msgBody: any = { + parts: [{ type: 'text', text: fullText }], + }; + if (agentName) msgBody.agent = agentName; + + const msgResp = await fetch(`${FLOCKS_BASE}/api/session/${sessionId}/message`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(msgBody), + }); + if (!msgResp.ok) { + throw new Error(`Failed to send message: ${msgResp.status} ${await msgResp.text()}`); + } + + // 3. 
Consume event SSE and extract message.part.updated deltas + const reader = eventResp.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ''; + let finished = false; + + while (!finished) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; + + for (const line of lines) { + if (!line.startsWith('data: ')) continue; + const raw = line.slice(6).trim(); + if (!raw || raw === '[DONE]') continue; + + let event: any; + try { event = JSON.parse(raw); } catch { continue; } + + const type = event.type; + const props = event.properties || {}; + + // text delta → OpenAI chunk + if (type === 'message.part.updated') { + const delta: string = props.delta || ''; + const partType: string = props.part?.type || ''; + if (delta && partType === 'text') { + yield openAIChunk(delta); + } + } + + // Inference completion signal + if (type === 'message.updated') { + const finish = props.info?.finish; + if (finish === 'stop' || finish === 'error') { + finished = true; + } + } + } + } + + reader.cancel().catch(() => {}); +} + +function openAIChunk(delta: string, finish?: string): string { + const chunk = { + id: 'chatcmpl-flocks', + object: 'chat.completion.chunk', + created: Math.floor(Date.now() / 1000), + model: 'flocks', + choices: [{ + index: 0, + delta: delta ? { content: delta } : {}, + finish_reason: finish ?? 
null, + }], + }; + return `data: ${JSON.stringify(chunk)}\n\n`; +} + +// ── Embedded HTTP proxy: translate /v1/chat/completions into flocks calls ── +function startProxy(): Promise { + return new Promise((resolve) => { + const server = createServer(async (req: IncomingMessage, res: ServerResponse) => { + if (req.method !== 'POST' || req.url !== '/v1/chat/completions') { + res.writeHead(404); + res.end('Not found'); + return; + } + + // Read request body + const chunks: Buffer[] = []; + for await (const chunk of req) chunks.push(chunk as Buffer); + let body: any; + try { body = JSON.parse(Buffer.concat(chunks).toString()); } + catch { res.writeHead(400); res.end('Bad JSON'); return; } + + const messages: any[] = body.messages || []; + const sessionKey: string = body.user || 'default'; + const agentName: string = + (req.headers['x-openclaw-agent-id'] as string) || FLOCKS_AGENT || ''; + + const systemPrompts = messages + .filter(m => m.role === 'system' && m.content) + .map(m => m.content as string); + + let userText = ''; + for (let i = messages.length - 1; i >= 0; i--) { + if (messages[i].role === 'user') { + userText = typeof messages[i].content === 'string' + ? 
messages[i].content + : String(messages[i].content); + break; + } + } + + if (DEBUG) { + console.log(`[proxy] session_key=${sessionKey} agent=${agentName} preview=${userText.slice(0, 60)}`); + } + + if (!userText) { + res.writeHead(200, { 'Content-Type': 'text/event-stream' }); + res.write(openAIChunk('', 'stop')); + res.write('data: [DONE]\n\n'); + res.end(); + return; + } + + res.writeHead(200, { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'X-Accel-Buffering': 'no', + }); + + try { + const sessionId = await getOrCreateSession(sessionKey, agentName); + for await (const chunk of flocksToOpenAIStream(sessionId, userText, agentName, systemPrompts)) { + res.write(chunk); + } + res.write(openAIChunk('', 'stop')); + res.write('data: [DONE]\n\n'); + } catch (err: any) { + console.error('[proxy] Request failed:', err.message); + res.write(`data: ${JSON.stringify({ error: { message: err.message } })}\n\n`); + res.write('data: [DONE]\n\n'); + } + res.end(); + }); + + server.listen(0, PROXY_HOST, () => { + const addr = server.address() as { port: number }; + PROXY_PORT = addr.port; + console.log(`[runner] proxy listening on ${PROXY_HOST}:${PROXY_PORT} → flocks :${FLOCKS_PORT}`); + resolve(PROXY_PORT); + }); + }); +} + +// ── Fake runtime shim ─────────────────────────────────────────────────────── +const fakeRuntime = { + gateway: { port: PROXY_PORT }, // updated with the actual port after startProxy() + channel: { + activity: { + record: (channelId: string, accountId: string, event: string) => { + if (DEBUG) console.log(`[runner][activity] ${channelId}/${accountId}: ${event}`); + }, + }, + }, +}; + +// ── Fake API shim ─────────────────────────────────────────────────────────── +const fakeApi: any = { + runtime: fakeRuntime, + logger: { + info: (msg: string) => console.log(`[plugin] ${msg}`), + warn: (msg: string) => console.warn(`[plugin] ${msg}`), + error: (msg: string) => console.error(`[plugin] ${msg}`), + debug: (msg: string) => { if (DEBUG) 
console.log(`[plugin:debug] ${msg}`); }, + }, + + registerChannel({ plugin: channelPlugin }: any) { + console.log(`[runner] registerChannel → starting startAccount (accountId=${ACCOUNT_ID})`); + + const abortController = new AbortController(); + const shutdown = () => { + console.log('[runner] shutdown signal received, aborting...'); + abortController.abort(); + }; + process.once('SIGTERM', shutdown); + process.once('SIGINT', shutdown); + + // cfg.gateway.port points to the local proxy + const cfg = { + channels: { + 'dingtalk-connector': { + clientId: CLIENT_ID, + clientSecret: CLIENT_SECRET, + gatewayToken: GATEWAY_TOKEN, + debug: DEBUG, + ...(FLOCKS_AGENT ? { defaultAgent: FLOCKS_AGENT } : {}), + }, + }, + gateway: { port: PROXY_PORT }, + }; + + channelPlugin.gateway.startAccount({ + account: { + accountId: ACCOUNT_ID, + config: cfg.channels['dingtalk-connector'], + }, + cfg, + abortSignal: abortController.signal, + log: { + info: (msg: string) => console.log(`[dingtalk] ${msg}`), + warn: (msg: string) => console.warn(`[dingtalk] ${msg}`), + error: (msg: string) => console.error(`[dingtalk] ${msg}`), + debug: (msg: string) => { if (DEBUG) console.log(`[dingtalk:debug] ${msg}`); }, + }, + }).catch((err: Error) => { + console.error('[runner] startAccount error:', err.message); + process.exit(1); + }); + }, + + registerGatewayMethod(name: string, _fn: any) { + if (DEBUG) console.log(`[runner] registerGatewayMethod: ${name} (noop)`); + }, +}; + +// ── Startup: launch proxy first, then register the plugin ─────────────────── +(async () => { + await startProxy(); + + // Sync the resolved port into fakeRuntime (cfg.gateway.port is set inline in registerChannel) + fakeRuntime.gateway.port = PROXY_PORT; + + console.log(`[runner] starting DingTalk connector → flocks :${FLOCKS_PORT}`); + plugin.register(fakeApi); +})(); From 8ec37a2375c0762bacfdd0cc22e5f8aedb73d4ca Mon Sep 17 00:00:00 2001 From: duguwanglong Date: Fri, 10 Apr 2026 15:57:55 +0800 Subject: [PATCH 2/3] 
fix(channel): resolve DingTalk always-connecting after restart Four bugs caused channel status to stay stuck in "connecting": 1. _kill_process() in dingtalk.py used subprocess.wait(timeout=5) which blocked the asyncio event loop for up to 5 seconds, causing a race condition where the new channel task started before the old task fully unwound and called mark_disconnected(). Fix: replace with _kill_process_async() using asyncio.to_thread. 2. _monitor() swallowed subprocess non-zero exit by calling mark_disconnected() and returning normally, so _run_with_reconnect treated it as a clean exit and retried immediately with no backoff. Fix: raise RuntimeError on non-zero exit to trigger exponential backoff. 3. _run_with_reconnect had no delay after a clean start() return, causing a tight busy-loop when the connection drops immediately after connect. Fix: sleep RECONNECT_BASE_DELAY (1s) before retrying clean exits. 4. get_status() called registry.get(channel_id) which returned the latest registered plugin instance after each file-watcher rescan, while the running task held a reference to an older instance. The new instance always had connected=False, making the UI show "connecting" forever. Fix: add _running_plugins dict to GatewayManager that pins the plugin instance used by the running task; all status/stop paths use it. 
Made-with: Cursor --- .flocks/plugins/channels/dingtalk/dingtalk.py | 47 +++++++++++-------- flocks/channel/gateway/manager.py | 17 +++++-- 2 files changed, 42 insertions(+), 22 deletions(-) diff --git a/.flocks/plugins/channels/dingtalk/dingtalk.py b/.flocks/plugins/channels/dingtalk/dingtalk.py index 09aa59e..e86388f 100644 --- a/.flocks/plugins/channels/dingtalk/dingtalk.py +++ b/.flocks/plugins/channels/dingtalk/dingtalk.py @@ -165,7 +165,7 @@ async def start( async def stop(self) -> None: if self._monitor_task and not self._monitor_task.done(): self._monitor_task.cancel() - self._kill_process() + await self._kill_process_async() self.mark_disconnected() # ── Outbound messages ───────────────────────────────────────────────────── @@ -207,21 +207,25 @@ def _start_process(self, npm: str, env: dict) -> None: ) log.info("dingtalk.process.started", {"pid": self._proc.pid}) - def _kill_process(self) -> None: - """Terminate the subprocess.""" - if self._proc and self._proc.poll() is None: - log.info("dingtalk.process.terminating", {"pid": self._proc.pid}) - self._proc.terminate() - try: - self._proc.wait(timeout=5) - except subprocess.TimeoutExpired: - self._proc.kill() - self._proc.wait() - log.info("dingtalk.process.stopped", {"pid": self._proc.pid}) + async def _kill_process_async(self) -> None: + """Terminate the subprocess without blocking the asyncio event loop.""" + proc = self._proc self._proc = None + if proc is None or proc.poll() is not None: + return + pid = proc.pid + log.info("dingtalk.process.terminating", {"pid": pid}) + proc.terminate() + try: + await asyncio.wait_for(asyncio.to_thread(proc.wait), timeout=5.0) + except asyncio.TimeoutError: + proc.kill() + await asyncio.to_thread(proc.wait) + log.info("dingtalk.process.stopped", {"pid": pid}) async def _monitor(self, abort_event: Optional[asyncio.Event]) -> None: - """Monitor the subprocess; log errors on non-zero exit; stop when abort_event fires.""" + """Monitor the subprocess; raise RuntimeError on 
non-zero exit; stop when abort_event fires.""" + exit_code: Optional[int] = None try: while True: if abort_event and abort_event.is_set(): @@ -230,19 +234,24 @@ async def _monitor(self, abort_event: Optional[asyncio.Event]) -> None: # Non-blocking check whether the process has exited if self._proc and self._proc.poll() is not None: - rc = self._proc.returncode - if rc != 0: - log.error("dingtalk.process.exited_unexpectedly", {"returncode": rc}) - self.mark_disconnected(f"runner.ts exited unexpectedly, exit code={rc}") + exit_code = self._proc.returncode + if exit_code != 0: + log.error("dingtalk.process.exited_unexpectedly", {"returncode": exit_code}) else: - log.info("dingtalk.process.exited_normally", {"returncode": rc}) + log.info("dingtalk.process.exited_normally", {"returncode": exit_code}) break await asyncio.sleep(2) except asyncio.CancelledError: pass finally: - self._kill_process() + # Non-blocking cleanup: must not block the event loop while waiting for + # the Node.js process to exit (can take up to 5s with SIGTERM). + await self._kill_process_async() + + # Raise after cleanup so the gateway reconnect loop applies exponential backoff. 
+ if exit_code is not None and exit_code != 0: + raise RuntimeError(f"runner.ts exited unexpectedly, exit code={exit_code}") # Discovered by flocks PluginLoader via this variable diff --git a/flocks/channel/gateway/manager.py b/flocks/channel/gateway/manager.py index 46cc94f..555762e 100644 --- a/flocks/channel/gateway/manager.py +++ b/flocks/channel/gateway/manager.py @@ -35,6 +35,7 @@ class GatewayManager: def __init__(self, registry: Optional[ChannelRegistry] = None) -> None: self._registry = registry or default_registry self._running: dict[str, asyncio.Task] = {} + self._running_plugins: dict[str, ChannelPlugin] = {} self._abort_events: dict[str, asyncio.Event] = {} self._dispatcher: Optional[InboundDispatcher] = None self._started_at: Optional[float] = None @@ -84,6 +85,7 @@ async def start_all(self) -> None: name=f"channel-{channel_id}", ) self._running[channel_id] = task + self._running_plugins[channel_id] = plugin log.info("gateway.channel_started", {"channel": channel_id}) async def stop_all(self) -> None: @@ -101,7 +103,7 @@ async def stop_all(self) -> None: await asyncio.gather(*pending, return_exceptions=True) for channel_id in list(self._running.keys()): - plugin = self._registry.get(channel_id) + plugin = self._running_plugins.get(channel_id) or self._registry.get(channel_id) if plugin: try: await plugin.stop() @@ -111,6 +113,7 @@ async def stop_all(self) -> None: }) self._running.clear() + self._running_plugins.clear() self._abort_events.clear() try: @@ -123,7 +126,9 @@ def get_status(self) -> dict[str, ChannelStatus]: """Snapshot of every running channel's health status.""" result: dict[str, ChannelStatus] = {} for channel_id in self._running: - plugin = self._registry.get(channel_id) + # Use the plugin instance the running task holds, not the registry's + # latest instance (registry may have been updated by the file watcher). 
+ plugin = self._running_plugins.get(channel_id) or self._registry.get(channel_id) if plugin: status = plugin.status if status.started_at: @@ -183,6 +188,7 @@ async def start_channel(self, channel_id: str) -> None: name=f"channel-{channel_id}", ) self._running[channel_id] = task + self._running_plugins[channel_id] = plugin log.info("gateway.channel_started", {"channel": channel_id}) async def stop_channel(self, channel_id: str) -> None: @@ -198,7 +204,7 @@ async def stop_channel(self, channel_id: str) -> None: except (asyncio.TimeoutError, Exception): task.cancel() - plugin = self._registry.get(channel_id) + plugin = self._running_plugins.pop(channel_id, None) or self._registry.get(channel_id) if plugin: try: await plugin.stop() @@ -286,6 +292,11 @@ async def _run_with_reconnect( delay = self.RECONNECT_BASE_DELAY attempt += 1 + # Brief pause before retrying a clean exit to avoid busy-looping + # when the remote connection drops immediately after connect. + if await self._sleep_or_abort(abort_event, self.RECONNECT_BASE_DELAY): + break + except asyncio.CancelledError: break except Exception as e: From c388a06318cfa3141d045950190c4dcb7f230f80 Mon Sep 17 00:00:00 2001 From: duguwanglong Date: Fri, 10 Apr 2026 16:13:29 +0800 Subject: [PATCH 3/3] =?UTF-8?q?fix(dingtalk):=20address=20PR=20review=20?= =?UTF-8?q?=E2=80=94=20session=20filter,=20config=20passthrough,=20node=5F?= =?UTF-8?q?modules=20check?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - runner.ts: filter SSE events by sessionId in flocksToOpenAIStream to prevent delta/finish signals from concurrent sessions being mixed into the wrong stream (props.part.sessionID for message.part.updated, props.info.sessionID for message.updated) - dingtalk.py + runner.ts: forward optional plugin.ts config fields (dmPolicy, allowFrom, separateSessionByConversation, groupSessionScope, sharedMemoryAcrossConversations) via env vars so flocks.json settings match documented behaviour - 
dingtalk.py: validate_config now checks node_modules exists and emits a clear install hint if npm/bun install has not been run yet - dingtalk.py: document that on_message/InboundDispatcher is intentionally unused; DingTalk routes messages directly through the Session API inside the runner.ts ↔ plugin.ts layer Made-with: Cursor --- .flocks/plugins/channels/dingtalk/dingtalk.py | 21 +++++++++++++++++- .flocks/plugins/channels/dingtalk/runner.ts | 22 +++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/.flocks/plugins/channels/dingtalk/dingtalk.py b/.flocks/plugins/channels/dingtalk/dingtalk.py index e86388f..1130542 100644 --- a/.flocks/plugins/channels/dingtalk/dingtalk.py +++ b/.flocks/plugins/channels/dingtalk/dingtalk.py @@ -119,6 +119,12 @@ def validate_config(self, config: dict) -> Optional[str]: return f"runner.ts not found: {_RUNNER_TS}" if not _CONNECTOR_PACKAGE.exists(): return f"package.json not found: {_CONNECTOR_PACKAGE}" + node_modules = _CONNECTOR_DIR / "node_modules" + if not node_modules.is_dir(): + return ( + f"node_modules not found in {_CONNECTOR_DIR}. " + "Run `npm install` (or `bun install`) inside that directory first." + ) return None # ── Lifecycle ───────────────────────────────────────────────────────────── @@ -129,7 +135,14 @@ async def start( on_message: Callable[[InboundMessage], Awaitable[None]], abort_event: Optional[asyncio.Event] = None, ) -> None: - """Start the runner.ts subprocess and monitor it until abort_event fires.""" + """Start the runner.ts subprocess and monitor it until abort_event fires. + + Design note: DingTalk inbound messages are handled entirely inside the + runner.ts ↔ plugin.ts layer, which calls the flocks Session API directly. + The `on_message` / InboundDispatcher path (used by Feishu, WeCom, Telegram) + is intentionally NOT used here; this means dedup, debounce, channel.inbound + hooks and session-binding are the responsibility of plugin.ts itself. 
+ """ self._config = config self._on_message = on_message @@ -145,6 +158,12 @@ async def start( "FLOCKS_GATEWAY_TOKEN": config.get("gatewayToken", ""), "DINGTALK_DEBUG": "true" if config.get("debug") else "false", "DINGTALK_ACCOUNT_ID": config.get("_account_id", "__default__"), + # Optional policy / behaviour fields forwarded to plugin.ts + "DINGTALK_DM_POLICY": str(config.get("dmPolicy", "")), + "DINGTALK_ALLOW_FROM": ",".join(config.get("allowFrom") or []), + "DINGTALK_SEPARATE_SESSION": "true" if config.get("separateSessionByConversation", True) else "false", + "DINGTALK_GROUP_SESSION_SCOPE": str(config.get("groupSessionScope", "")), + "DINGTALK_SHARED_MEMORY": "true" if config.get("sharedMemoryAcrossConversations") else "false", } log.info("dingtalk.start", { diff --git a/.flocks/plugins/channels/dingtalk/runner.ts b/.flocks/plugins/channels/dingtalk/runner.ts index f1d8101..bf97d2f 100644 --- a/.flocks/plugins/channels/dingtalk/runner.ts +++ b/.flocks/plugins/channels/dingtalk/runner.ts @@ -31,6 +31,14 @@ const GATEWAY_TOKEN = process.env.FLOCKS_GATEWAY_TOKEN || ''; const DEBUG = process.env.DINGTALK_DEBUG === 'true'; const ACCOUNT_ID = process.env.DINGTALK_ACCOUNT_ID || '__default__'; +// Optional policy / behaviour fields forwarded from flocks.json → plugin.ts +const DM_POLICY = process.env.DINGTALK_DM_POLICY || ''; +const ALLOW_FROM_RAW = process.env.DINGTALK_ALLOW_FROM || ''; +const ALLOW_FROM = ALLOW_FROM_RAW ? 
ALLOW_FROM_RAW.split(',').map(s => s.trim()).filter(Boolean) : undefined; +const SEPARATE_SESSION = process.env.DINGTALK_SEPARATE_SESSION !== 'false'; // default true +const GROUP_SESSION_SCOPE = process.env.DINGTALK_GROUP_SESSION_SCOPE || ''; +const SHARED_MEMORY = process.env.DINGTALK_SHARED_MEMORY === 'true'; + // Proxy listens on a random port; plugin.ts's streamFromGateway calls land here const PROXY_HOST = '127.0.0.1'; let PROXY_PORT = 0; // resolved after startup @@ -159,6 +167,11 @@ async function* flocksToOpenAIStream( // text delta → OpenAI chunk if (type === 'message.part.updated') { + // Only consume events belonging to this session to avoid mixing + // deltas from concurrent DingTalk / Web / TUI conversations. + const eventSessionId: string = props.part?.sessionID || ''; + if (eventSessionId && eventSessionId !== sessionId) continue; + const delta: string = props.delta || ''; const partType: string = props.part?.type || ''; if (delta && partType === 'text') { @@ -168,6 +181,9 @@ async function* flocksToOpenAIStream( // Inference completion signal if (type === 'message.updated') { + const eventSessionId: string = props.info?.sessionID || ''; + if (eventSessionId && eventSessionId !== sessionId) continue; + const finish = props.info?.finish; if (finish === 'stop' || finish === 'error') { finished = true; @@ -314,6 +330,12 @@ const fakeApi: any = { gatewayToken: GATEWAY_TOKEN, debug: DEBUG, ...(FLOCKS_AGENT ? { defaultAgent: FLOCKS_AGENT } : {}), + // Optional policy / behaviour fields (only set when non-empty / non-default) + ...(DM_POLICY ? { dmPolicy: DM_POLICY } : {}), + ...(ALLOW_FROM ? { allowFrom: ALLOW_FROM } : {}), + ...(SEPARATE_SESSION ? {} : { separateSessionByConversation: false }), + ...(GROUP_SESSION_SCOPE ? { groupSessionScope: GROUP_SESSION_SCOPE } : {}), + ...(SHARED_MEMORY ? { sharedMemoryAcrossConversations: true } : {}), }, }, gateway: { port: PROXY_PORT },