Compare commits

...

10 Commits

Author SHA1 Message Date
ethan.chen
d1167ff9da refactor: Enhance database initialization script by improving comment removal, handling multi-line statements, and ensuring balanced parentheses for CREATE TABLE and CREATE INDEX statements 2026-01-08 11:34:21 +08:00
ethan.chen
02027bba62 refactor: Improve database initialization script by separating CREATE TABLE and CREATE INDEX statements, enhancing error handling, and providing clearer logging 2026-01-08 11:32:19 +08:00
ethan.chen
c47e59378b feat: Enhance database initialization script to create target database if it doesn't exist and improve error handling 2026-01-08 11:29:00 +08:00
ethan.chen
ca6fa71f38 feat: Migrate data storage to PostgreSQL with schema setup, initialization script, and update documentation 2026-01-08 11:14:35 +08:00
ethan.chen
62a9d01035 feat: git功能开发 2026-01-08 11:01:24 +08:00
ethan.chen
2458bfa111 feat: Enhance deployment capabilities with direct server deployment tools, email configuration, and comprehensive documentation 2026-01-07 16:56:31 +08:00
ethan.chen
8f8f852ce4 feat: Add Gitea Act Runner setup guide and update workflows to use self-hosted runners 2026-01-07 14:11:50 +08:00
ethan.chen
459f99e804 feat: Add Gitea Actions workflows and setup documentation for deployment 2026-01-07 14:06:12 +08:00
ethan.chen
6ab690fa40 feat: Add deployment scripts, Docker configuration, and documentation for Cloud MCP project 2026-01-07 13:54:32 +08:00
ethan.chen
47ecc40186 feat: Add testing framework and initial test cases for various tools and database operations 2026-01-07 10:05:04 +08:00
62 changed files with 6194 additions and 212 deletions

20
.dockerignore Normal file
View File

@@ -0,0 +1,20 @@
node_modules
dist
.git
.gitignore
.env
.env.local
*.log
.DS_Store
data
*.db
*.sqlite
*.sqlite3
tests
.vscode
.idea
*.md
!README.md
coverage
.nyc_output

View File

@@ -0,0 +1,18 @@
# Gitea Actions 工作流
本目录包含 Gitea Actions 工作流配置文件。
## 文件说明
- `deploy.yml` - 基础部署工作流
- `deploy-simple.yml` - 简化版部署工作流(推荐使用)
## 使用说明
1. 确保已安装并运行 Act Runner
2. 在 Gitea 仓库中配置 Secrets
3. 更新工作流中的项目路径
4. 推送代码或手动触发工作流
详细配置请参考 `GITEA_ACTIONS_SETUP.md`

View File

@@ -0,0 +1,64 @@
name: Deploy to Server (Simple)

on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:

jobs:
  deploy:
    # "self-hosted" matches any self-hosted runner regardless of extra labels.
    # Replace with the label you registered your runner under, if different.
    runs-on: self-hosted
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Deploy to server via SSH
        # Secrets are passed through env so they are never interpolated into
        # the shell script text (avoids quoting breakage and log leakage).
        env:
          SSH_KEY: ${{ secrets.SERVER_SSH_KEY }}
          SSH_USER: ${{ secrets.SERVER_USERNAME }}
          SSH_HOST: ${{ secrets.SERVER_HOST }}
        run: |
          # Setup SSH: write the deploy key to a private file for `ssh -i`.
          mkdir -p ~/.ssh
          echo "$SSH_KEY" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          # Add host to known_hosts (best effort; host checking is disabled below)
          ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true
          # Deploy. The quoted 'ENDSSH' heredoc prevents local expansion:
          # everything between the markers runs on the remote server.
          ssh -i ~/.ssh/deploy_key \
            -o StrictHostKeyChecking=no \
            -o UserKnownHostsFile=~/.ssh/known_hosts \
            "$SSH_USER@$SSH_HOST" << 'ENDSSH'
          set -e
          echo "Starting deployment..."
          # Navigate to project directory (update this path)
          cd /opt/cloud-mcp || {
            echo "Error: Project directory not found"
            exit 1
          }
          # Pull latest code
          echo "Pulling latest code..."
          git fetch origin
          git checkout main || git checkout master
          git pull origin main || git pull origin master
          # Run deployment script
          echo "Running deployment script..."
          chmod +x deploy-gitea.sh
          ./deploy-gitea.sh
          echo "Deployment completed!"
          ENDSSH
          # Cleanup the private key from the runner workspace
          rm -f ~/.ssh/deploy_key

View File

@@ -0,0 +1,42 @@
name: Deploy to Server

on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:

jobs:
  deploy:
    # "self-hosted" matches any self-hosted runner (no specific label needed).
    # Replace with the label you registered your runner under, if different.
    runs-on: self-hosted
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Setup SSH
        # Pass secrets via env rather than inlining `${{ }}` in the script:
        # inlined expressions are expanded into the shell text before it runs,
        # which breaks quoting for multi-line keys and risks log leakage.
        env:
          SSH_KEY: ${{ secrets.SERVER_SSH_KEY }}
          SSH_HOST: ${{ secrets.SERVER_HOST }}
        run: |
          mkdir -p ~/.ssh
          echo "$SSH_KEY" > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          ssh-keyscan -H "$SSH_HOST" >> ~/.ssh/known_hosts 2>/dev/null || true

      - name: Deploy to server
        env:
          SSH_USER: ${{ secrets.SERVER_USERNAME }}
          SSH_HOST: ${{ secrets.SERVER_HOST }}
        run: |
          # Quoted 'ENDSSH' heredoc: the body runs verbatim on the server.
          ssh -i ~/.ssh/deploy_key \
            -o StrictHostKeyChecking=no \
            -o UserKnownHostsFile=~/.ssh/known_hosts \
            "$SSH_USER@$SSH_HOST" << 'ENDSSH'
          set -e
          cd /opt/cloud-mcp || cd /path/to/cloud-mcp
          git pull origin main || git pull origin master
          ./deploy-gitea.sh
          ENDSSH

      - name: Cleanup
        # Always remove the private key, even when deployment failed.
        if: always()
        run: |
          rm -f ~/.ssh/deploy_key

28
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
name: Deploy to Server

on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Deploy to server
        # NOTE(review): "@master" is a floating ref — pin to a released tag
        # (e.g. appleboy/ssh-action@v1.0.3) for reproducible, auditable runs.
        uses: appleboy/ssh-action@master
        with:
          host: ${{ secrets.SERVER_HOST }}
          username: ${{ secrets.SERVER_USERNAME }}
          key: ${{ secrets.SERVER_SSH_KEY }}
          script: |
            # Update this placeholder to the real project path on the server.
            cd /path/to/cloud-mcp
            # Fall back to master: this workflow also triggers on the master
            # branch (matches the Gitea workflows' behavior).
            git pull origin main || git pull origin master
            ./deploy.sh --rebuild

7
.gitignore vendored
View File

@@ -9,3 +9,10 @@ data/
*.sqlite
*.sqlite3
# Deployment
deploy.log
*.log
# Gitea Actions
.gitea/workflows/*.yml.bak

308
DEPLOY.md Normal file
View File

@@ -0,0 +1,308 @@
# 部署文档
## 概述
本项目支持使用 Docker 进行部署,可以通过手动脚本或 Gitea Webhook 自动部署。
## 前置要求
- Docker 和 Docker Compose 已安装
- Git 已安装
- 服务器有足够的资源(建议至少 512MB 内存)
## 快速开始
### 1. 克隆项目到服务器
```bash
git clone <your-gitea-repo-url> /path/to/cloud-mcp
cd /path/to/cloud-mcp
```
### 2. 配置环境变量
复制环境变量模板并编辑:
```bash
cp env.template .env
nano .env # 或使用你喜欢的编辑器
```
配置必要的环境变量(NAS、服务器、路由器等)
### 3. 创建数据目录
```bash
mkdir -p data
```
### 4. 首次部署
```bash
./deploy.sh --rebuild
```
## 部署方式
### 方式一:手动部署脚本
使用 `deploy.sh` 脚本进行手动部署:
```bash
# 基本部署(使用缓存)
./deploy.sh
# 从 Git 拉取最新代码并部署
./deploy.sh --pull
# 强制重新构建(不使用缓存)
./deploy.sh --rebuild
# 拉取代码并重新构建
./deploy.sh --pull --rebuild
```
### 方式二Gitea Webhook 自动部署
#### 1. 配置部署脚本
编辑 `deploy-gitea.sh`,更新以下变量:
```bash
PROJECT_DIR="/path/to/cloud-mcp" # 你的项目路径
BRANCH="main" # 或 "master"
```
#### 2. 在 Gitea 中配置 Webhook
1. 进入仓库:**Settings** -> **Webhooks** -> **Add Webhook**
2. 配置如下:
- **Target URL**: `http://your-server:port/gitea-webhook` (如果使用 webhook 服务器)
- 或者直接在服务器上设置 Git Hook
3. **Content Type**: `application/json`
4. **Secret**: (可选) 设置一个密钥
5. **Events**: 选择 `Push` 事件
#### 3. 设置 Git Hook推荐
在服务器上设置 Git post-receive hook
```bash
cd /path/to/cloud-mcp/.git/hooks
cat > post-receive << 'EOF'
#!/bin/bash
cd /path/to/cloud-mcp
/path/to/cloud-mcp/deploy-gitea.sh
EOF
chmod +x post-receive
```
或者使用 Gitea 的 Webhook 触发脚本:
```bash
# 安装 webhook 服务器(如 webhook
# 然后配置 webhook 调用 deploy-gitea.sh
```
#### 4. 测试自动部署
```bash
# 在本地推送代码
git push origin main
# 在服务器上查看日志
tail -f /path/to/cloud-mcp/deploy.log
```
## Docker Compose 命令
### 基本操作
```bash
# 启动服务
docker-compose up -d
# 停止服务
docker-compose down
# 查看日志
docker-compose logs -f
# 重启服务
docker-compose restart
# 查看状态
docker-compose ps
```
### 手动更新
```bash
# 拉取最新代码
git pull origin main
# 重新构建并启动
docker-compose build --no-cache
docker-compose up -d
```
## 部署脚本命令
`deploy-gitea.sh` 支持多种操作:
```bash
# 完整部署(默认)
./deploy-gitea.sh
# 仅拉取代码
./deploy-gitea.sh --pull-only
# 重新构建
./deploy-gitea.sh --rebuild
# 查看状态
./deploy-gitea.sh --status
# 查看日志
./deploy-gitea.sh --logs
# 停止服务
./deploy-gitea.sh --stop
# 启动服务
./deploy-gitea.sh --start
# 重启服务
./deploy-gitea.sh --restart
```
## 数据持久化
数据存储在 `./data` 目录,通过 Docker volume 挂载:
- 代码片段:`data/codeSnippets.json`
- 笔记:`data/notes.json`
- 任务:`data/tasks.json`
- 其他数据文件...
**重要**:定期备份 `data` 目录!
## 监控和维护
### 查看容器状态
```bash
docker ps --filter "name=cloud-mcp"
```
### 查看日志
```bash
# 实时日志
docker logs -f cloud-mcp
# 最近 100 行
docker logs --tail 100 cloud-mcp
```
### 进入容器
```bash
docker exec -it cloud-mcp /bin/sh
```
### 清理资源
```bash
# 清理未使用的镜像
docker image prune -f
# 清理未使用的容器和网络
docker system prune -f
```
## 故障排查
### 容器无法启动
1. 检查日志:`docker logs cloud-mcp`
2. 检查环境变量:确保 `.env` 文件配置正确
3. 检查端口冲突:确保没有其他服务占用端口
4. 检查磁盘空间:`df -h`
### 数据丢失
1. 检查 volume 挂载:`docker inspect cloud-mcp | grep Mounts`
2. 检查数据目录权限
3. 从备份恢复数据
### 自动部署不工作
1. 检查 webhook 配置
2. 检查脚本权限:`chmod +x deploy-gitea.sh`
3. 检查日志:`tail -f deploy.log`
4. 手动测试脚本:`./deploy-gitea.sh --status`
## 安全建议
1. **不要将 `.env` 文件提交到 Git**
2. 使用强密码和 SSH 密钥
3. 定期更新 Docker 镜像
4. 限制服务器访问权限
5. 使用防火墙限制端口访问
6. 定期备份数据
## 性能优化
1. **资源限制**:在 `docker-compose.yml` 中添加资源限制:
```yaml
services:
cloud-mcp:
# ...
deploy:
resources:
limits:
cpus: '1'
memory: 512M
reservations:
cpus: '0.5'
memory: 256M
```
2. **日志轮转**:配置 Docker 日志驱动限制日志大小
3. **健康检查**:已包含在 `docker-compose.yml`
## 更新流程
### 开发流程
1. 在本地开发并测试
2. 提交代码到 Git
3. 推送到 Gitea 仓库
4. Webhook 自动触发部署(如果配置)
5. 或手动运行 `./deploy-gitea.sh`
### 回滚
如果需要回滚到之前的版本:
```bash
# 查看提交历史
git log
# 回滚到指定提交
git checkout <commit-hash>
# 重新部署
./deploy-gitea.sh --rebuild
```
## 支持
如有问题,请查看:
- 项目 README.md
- 测试文档tests/README.md
- 日志文件deploy.log

295
DIRECT_DEPLOY.md Normal file
View File

@@ -0,0 +1,295 @@
# 直接部署方案(推荐)
## 概述
直接在服务器上运行 MCP 服务器(不使用 Docker),这样可以直接使用 MCP 工具进行部署,无需 SSH 连接。
## 优势
**无需 SSH** - MCP 工具直接在服务器上运行
**更简单** - 不需要配置 Runner、Webhook 等
**更直接** - 在 Cursor 中直接触发部署
**更灵活** - 可以直接访问服务器文件系统
## 安装步骤
### 1. 在服务器上安装 Bun
```bash
# 安装 Bun
curl -fsSL https://bun.sh/install | bash
# 验证安装
bun --version
```
### 2. 克隆项目
```bash
git clone <your-gitea-repo-url> /opt/cloud-mcp
cd /opt/cloud-mcp
```
### 3. 安装依赖
```bash
bun install
```
### 4. 配置环境变量
```bash
cp env.template .env
nano .env # 编辑配置
```
### 5. 运行 MCP 服务器
#### 方式一:直接运行(测试)
```bash
bun run src/index.ts
```
#### 方式二:使用 PM2推荐
```bash
# 安装 PM2
bun add -g pm2
# 启动服务
pm2 start "bun run src/index.ts" --name cloud-mcp
# 设置开机自启
pm2 save
pm2 startup
```
#### 方式三:使用 systemd生产环境
创建服务文件:
```bash
sudo tee /etc/systemd/system/cloud-mcp.service > /dev/null <<EOF
[Unit]
Description=Cloud MCP Server
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/cloud-mcp
Environment="PATH=/usr/local/bin:/usr/bin:/bin"
ExecStart=/usr/local/bin/bun run src/index.ts
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target
EOF
# 启用并启动服务
sudo systemctl daemon-reload
sudo systemctl enable cloud-mcp
sudo systemctl start cloud-mcp
sudo systemctl status cloud-mcp
```
## 在 Cursor 中配置 MCP
### 1. 配置 MCP 服务器
编辑 Cursor 的 MCP 配置(通常在 `~/.cursor/mcp.json` 或类似位置):
```json
{
"mcpServers": {
"cloud-mcp": {
"command": "ssh",
"args": [
"user@your-server",
"cd /opt/cloud-mcp && bun run src/index.ts"
]
}
}
}
```
**或者**,如果 MCP 服务器已经在运行,通过 stdio 连接:
```json
{
"mcpServers": {
"cloud-mcp": {
"command": "ssh",
"args": [
"user@your-server",
"bun run /opt/cloud-mcp/src/index.ts"
]
}
}
}
```
### 2. 使用部署工具
配置完成后,在 Cursor 中可以直接使用:
- `deploy_update` - 拉取最新代码并重新部署
- `deploy_status` - 查看部署状态
- `deploy_logs` - 查看部署日志
## 部署流程
### 自动部署(通过 MCP 工具)
1. **在 Cursor 中**,直接调用 `deploy_update` 工具
2. 工具会自动:
- 拉取最新代码
- 更新依赖(如果需要)
- 重启服务
### 手动部署
```bash
cd /opt/cloud-mcp
git pull origin main
bun install # 如果依赖有变化
pm2 restart cloud-mcp # 或 systemctl restart cloud-mcp
```
## 部署工具说明
### deploy_update
拉取最新代码并重新部署:
```
参数:
- branch: Git 分支(默认: main
- rebuild: 是否强制重建(默认: false
```
### deploy_status
查看当前部署状态:
- Git 状态
- 服务运行状态
- 是否有未提交的更改
### deploy_logs
查看部署日志:
- 参数lines显示行数默认 50
## 与 Docker 方案对比
| 特性 | 直接运行 | Docker |
|------|---------|--------|
| 部署复杂度 | ⭐ 简单 | ⭐⭐ 中等 |
| 资源占用 | 较低 | 较高 |
| 隔离性 | 较低 | 高 |
| MCP 工具访问 | ✅ 直接访问 | ❌ 需要 SSH |
| 适合场景 | 个人/小项目 | 生产环境 |
## 从 Docker 迁移
如果之前使用 Docker,迁移步骤如下:
```bash
# 1. 停止 Docker 容器
docker-compose down
# 2. 备份数据
cp -r data data.backup
# 3. 安装 Bun如果还没有
curl -fsSL https://bun.sh/install | bash
# 4. 安装依赖
cd /opt/cloud-mcp
bun install
# 5. 启动服务(选择一种方式)
# PM2
pm2 start "bun run src/index.ts" --name cloud-mcp
# 或 systemd
sudo systemctl start cloud-mcp
```
## 故障排查
### MCP 服务器无法启动
```bash
# 检查 Bun 是否安装
bun --version
# 检查依赖
bun install
# 查看错误
bun run src/index.ts
```
### 部署工具执行失败
1. **检查 Git 仓库**
```bash
cd /opt/cloud-mcp
git status
```
2. **检查权限**
```bash
ls -la /opt/cloud-mcp
chmod +x deploy-gitea.sh
```
3. **检查服务状态**
```bash
# PM2
pm2 status
# systemd
sudo systemctl status cloud-mcp
```
### 查看日志
```bash
# PM2 日志
pm2 logs cloud-mcp
# systemd 日志
sudo journalctl -u cloud-mcp -f
# 部署日志
tail -f /opt/cloud-mcp/deploy.log
```
## 安全建议
1. **使用非 root 用户运行**(如果可能):
```bash
sudo adduser mcpuser
sudo chown -R mcpuser:mcpuser /opt/cloud-mcp
```
2. **限制文件权限**
```bash
chmod 600 .env
```
3. **定期更新**
```bash
bun update
```
## 下一步
- 配置自动备份
- 设置监控告警
- 优化性能配置

39
Dockerfile Normal file
View File

@@ -0,0 +1,39 @@
# Dockerfile for Cloud MCP Server
# Multi-stage build: "base" installs all deps and builds; the final slim
# image reinstalls production-only deps and copies the build artifacts over.
FROM oven/bun:1 AS base
WORKDIR /app
# Copy package files
# bun.lockb* glob keeps the COPY valid even when no lockfile exists yet.
COPY package.json bun.lockb* ./
# Install dependencies
RUN bun install --frozen-lockfile
# Copy source code
COPY . .
# Build the project
RUN bun run build
# Production stage
FROM oven/bun:1-slim
WORKDIR /app
# Copy package files and install production dependencies
COPY package.json bun.lockb* ./
RUN bun install --frozen-lockfile --production
# Copy built files from base stage
# NOTE(review): CMD below runs src/index.ts directly (Bun executes TypeScript
# natively), so the copied dist/ appears unused at runtime — confirm whether
# the build output is needed here, or switch CMD to the dist entry point.
COPY --from=base /app/dist ./dist
COPY --from=base /app/src ./src
# Expose port (if needed for health checks)
# NOTE(review): the MCP server communicates over stdio; port 3000 is only
# relevant if the optional HTTP health-check endpoint is enabled.
EXPOSE 3000
# Set environment variables
ENV NODE_ENV=production
# Run the MCP server
CMD ["bun", "run", "src/index.ts"]

237
GITEA_ACTIONS_SETUP.md Normal file
View File

@@ -0,0 +1,237 @@
# Gitea Actions 部署配置指南
## 前置要求
### 1. 确保 Gitea Actions 已启用
在 Gitea 管理员设置中:
- 进入 **Site Administration****Actions**
- 确保 **Actions** 功能已启用
- 检查 `app.ini` 配置:
```ini
[actions]
ENABLED = true
```
### 2. 安装并运行 Act Runner
Act Runner 是 Gitea Actions 的执行器,需要在服务器上运行。
#### 下载 Act Runner
```bash
# 在服务器上执行
cd /tmp
wget https://gitea.com/gitea/act_runner/releases/download/v0.2.6/act_runner-linux-amd64
chmod +x act_runner-linux-amd64
sudo mv act_runner-linux-amd64 /usr/local/bin/act_runner
```
#### 注册 Runner
```bash
# 在服务器上执行
act_runner register \
--instance <your-gitea-url> \
--token <runner-token> \
--name my-runner \
--labels ubuntu-latest:docker://node:20-bullseye
```
**获取 Runner Token**
1. 进入 Gitea**Site Administration** → **Actions****Runners**
2. 点击 **New Runner**
3. 复制显示的 Token
#### 运行 Runner
```bash
# 作为服务运行(推荐)
sudo tee /etc/systemd/system/gitea-act-runner.service > /dev/null <<EOF
[Unit]
Description=Gitea Act Runner
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt/cloud-mcp
ExecStart=/usr/local/bin/act_runner daemon
Restart=always
[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload
sudo systemctl enable gitea-act-runner
sudo systemctl start gitea-act-runner
sudo systemctl status gitea-act-runner
```
或者直接运行:
```bash
act_runner daemon
```
### 3. 配置 GitHub Secrets在 Gitea 中)
在 Gitea 仓库中设置 Secrets
1. 进入仓库:**Settings** → **Secrets****Actions**
2. 添加以下 Secrets
```
SERVER_HOST=你的服务器IP或域名
SERVER_USERNAME=部署用户名(如 root 或 deployer
SERVER_SSH_KEY=你的SSH私钥完整内容包括 -----BEGIN 和 -----END
```
**生成 SSH 密钥对**(如果还没有):
```bash
# 在本地或服务器上
ssh-keygen -t ed25519 -C "gitea-actions-deploy" -f ~/.ssh/gitea_deploy
# 将公钥添加到服务器的 authorized_keys
ssh-copy-id -i ~/.ssh/gitea_deploy.pub user@your-server
# 复制私钥内容作为 SERVER_SSH_KEY secret
cat ~/.ssh/gitea_deploy
```
### 4. 更新部署脚本路径
编辑 `.gitea/workflows/deploy.yml``deploy-simple.yml`,更新项目路径:
```yaml
cd /opt/cloud-mcp # 改为你的实际项目路径
```
## 工作流文件说明
项目包含两个工作流文件:
### deploy.yml
- 使用 SSH 密钥文件方式
- 包含清理步骤
### deploy-simple.yml推荐
- 使用环境变量方式
- 更详细的日志输出
- 更好的错误处理
## 测试部署
### 1. 手动触发
在 Gitea 仓库中:
- 进入 **Actions** 标签页
- 选择 **Deploy to Server** 工作流
- 点击 **Run workflow**
### 2. 推送代码触发
```bash
git add .
git commit -m "Test deployment"
git push origin main
```
### 3. 查看日志
在 Gitea 仓库的 **Actions** 页面查看工作流执行日志。
## 故障排查
### 工作流一直显示"等待中"
1. **检查 Runner 是否运行**
```bash
sudo systemctl status gitea-act-runner
# 或
ps aux | grep act_runner
```
2. **检查 Runner 标签匹配**
- 工作流中 `runs-on: ubuntu-latest`
- Runner 注册时需要包含 `ubuntu-latest` 标签
3. **查看 Runner 日志**
```bash
sudo journalctl -u gitea-act-runner -f
```
### SSH 连接失败
1. **测试 SSH 连接**
```bash
ssh -i ~/.ssh/deploy_key user@server
```
2. **检查 SSH 密钥格式**
- 确保私钥包含完整的 `-----BEGIN` 和 `-----END` 行
- 确保没有多余的空格或换行
3. **检查服务器防火墙**
```bash
# 确保 SSH 端口开放
sudo ufw allow 22
```
### 部署脚本执行失败
1. **检查脚本权限**
```bash
chmod +x /opt/cloud-mcp/deploy-gitea.sh
```
2. **检查项目路径**
- 确保工作流中的路径与实际路径一致
3. **查看服务器日志**
```bash
tail -f /opt/cloud-mcp/deploy.log
```
## 安全建议
1. **使用专用部署用户**
```bash
sudo adduser deployer
sudo usermod -aG docker deployer
```
2. **限制 SSH 密钥权限**
- 使用 `command=` 限制 SSH 密钥只能执行特定命令
- 在 `~/.ssh/authorized_keys` 中:
```
command="/opt/cloud-mcp/deploy-gitea.sh" ssh-ed25519 AAAAC3...
```
3. **使用 SSH 密钥而非密码**
- 禁用密码登录
- 只使用密钥认证
4. **定期轮换密钥**
- 定期更新 SSH 密钥
- 更新 Gitea Secrets
## 验证部署
部署成功后,检查:
```bash
# 在服务器上
docker ps | grep cloud-mcp
docker logs cloud-mcp
```
## 下一步
- 配置自动部署触发条件
- 添加部署通知邮件、Slack 等)
- 设置部署回滚机制

163
MCP_CONNECTION.md Normal file
View File

@@ -0,0 +1,163 @@
# MCP 服务器连接说明
## 重要MCP 服务器不监听端口!
MCP (Model Context Protocol) 服务器使用 **stdio标准输入输出** 传输,**不监听任何网络端口**。
这意味着:
- ❌ 没有 HTTP 端点
- ❌ 没有 WebSocket 连接
- ❌ 不能通过浏览器访问
- ✅ 通过进程间通信stdin/stdout与客户端通信
## 工作原理
```
Cursor/Claude Desktop (客户端)
↓ (启动进程)
bun run src/index.ts (MCP 服务器)
↓ (stdio 通信)
工具执行和响应
```
## 在 Cursor 中配置
### 本地运行(开发)
```json
{
"mcpServers": {
"cloud-mcp": {
"command": "bun",
"args": ["run", "/Users/zijianchen/Desktop/my-project/cloud-mcp/src/index.ts"]
}
}
}
```
### 服务器运行(生产)
如果 MCP 服务器运行在远程服务器上:
```json
{
"mcpServers": {
"cloud-mcp": {
"command": "ssh",
"args": [
"user@your-server",
"cd /opt/cloud-mcp && bun run src/index.ts"
]
}
}
}
```
或者如果服务器上已经通过 PM2/systemd 运行,可以通过 SSH 直接连接:
```json
{
"mcpServers": {
"cloud-mcp": {
"command": "ssh",
"args": [
"user@your-server",
"bun run /opt/cloud-mcp/src/index.ts"
]
}
}
}
```
## 验证连接
### 1. 检查进程是否运行
```bash
# 在服务器上
ps aux | grep "bun.*index.ts"
# 或如果使用 PM2
pm2 list | grep cloud-mcp
# 或如果使用 systemd
sudo systemctl status cloud-mcp
```
### 2. 测试 MCP 服务器
```bash
# 直接运行测试
cd /opt/cloud-mcp
bun run src/index.ts
# 应该看到:
# [INFO] Registering tools...
# [INFO] All tools registered. Starting MCP server...
# [INFO] MCP Server started
```
### 3. 在 Cursor 中测试
1. 配置 MCP 服务器后
2. 重启 Cursor
3. 尝试使用任何工具(如 `deploy_status`
4. 如果连接成功,工具会正常执行
## 常见问题
### Q: 为什么看不到端口监听?
**A**: MCP 使用 stdio不需要端口。这是 MCP 协议的设计。
### Q: 如何知道服务器是否在运行?
**A**: 检查进程:
```bash
ps aux | grep "bun.*index.ts"
```
### Q: 可以添加 HTTP 端点吗?
**A**: 可以,但需要修改代码使用 SSE 传输。当前实现使用 stdio这是 MCP 的标准方式。
### Q: 如何调试连接问题?
**A**:
1. 检查 Cursor 的 MCP 日志
2. 在服务器上直接运行 `bun run src/index.ts` 查看错误
3. 检查 SSH 连接是否正常(如果使用远程服务器)
## 添加健康检查端点(可选)
如果你需要一个 HTTP 端点来检查服务状态,可以添加一个简单的 HTTP 服务器:
```typescript
// 在 src/index.ts 中添加
import { serve } from "bun";
// 健康检查端点(可选)
serve({
port: 3000,
fetch(req) {
if (req.url.endsWith("/health")) {
return new Response(JSON.stringify({ status: "ok" }), {
headers: { "Content-Type": "application/json" },
});
}
return new Response("Not Found", { status: 404 });
},
});
console.log("Health check server running on http://localhost:3000/health");
```
但这只是用于健康检查,**MCP 通信仍然通过 stdio**。
## 总结
- MCP 服务器**不监听端口**,使用 stdio 通信
- 在 Cursor 中配置时,指定 `command``args` 来启动进程
- 服务器运行时Cursor 会自动启动进程并通过 stdio 通信
- 这是 MCP 协议的标准工作方式

42
Makefile Normal file
View File

@@ -0,0 +1,42 @@
# Makefile for Cloud MCP Deployment
# Thin wrappers around docker-compose plus the deploy.sh helper script.
# Recipes MUST be indented with a literal tab character (Make requirement).

.PHONY: help build up down restart logs status deploy clean

# Default target: list the available commands.
help:
	@echo "Cloud MCP Deployment Commands:"
	@echo "  make build    - Build Docker image"
	@echo "  make up       - Start container"
	@echo "  make down     - Stop container"
	@echo "  make restart  - Restart container"
	@echo "  make logs     - Show container logs"
	@echo "  make status   - Show container status"
	@echo "  make deploy   - Full deployment (pull, build, restart)"
	@echo "  make clean    - Clean up unused Docker resources"

build:
	docker-compose build

up:
	docker-compose up -d

down:
	docker-compose down

restart:
	docker-compose restart

logs:
	docker-compose logs -f

status:
	docker-compose ps
	@echo ""
	@docker logs --tail 20 cloud-mcp 2>/dev/null || echo "Container not running"

# Full deployment: pull latest code and rebuild without cache.
deploy:
	@./deploy.sh --pull --rebuild

clean:
	docker system prune -f

113
QUICK_START.md Normal file
View File

@@ -0,0 +1,113 @@
# 快速部署指南
## 服务器端设置(一次性)
### 1. 克隆项目
```bash
git clone <your-gitea-repo-url> /opt/cloud-mcp
cd /opt/cloud-mcp
```
### 2. 配置环境变量
```bash
cp env.template .env
nano .env # 编辑配置
```
### 3. 设置脚本权限
```bash
chmod +x deploy.sh deploy-gitea.sh
```
### 4. 首次部署
```bash
./deploy.sh --rebuild
```
## Gitea Webhook 自动部署设置
### 方法一:使用 Git Hook推荐
```bash
# 在服务器上设置 post-receive hook
cd /opt/cloud-mcp
git config receive.denyCurrentBranch ignore
# 创建 hook 脚本
cat > .git/hooks/post-receive << 'EOF'
#!/bin/bash
cd /opt/cloud-mcp
git checkout -f
./deploy-gitea.sh
EOF
chmod +x .git/hooks/post-receive
```
### 方法二:使用 Gitea Webhook
1. 在 Gitea 仓库设置中添加 Webhook
2. URL: `http://your-server:port/hooks/deploy` (需要 webhook 服务器)
3. 或使用 SSH 方式触发部署脚本
## 日常使用
### 手动部署
```bash
# 在服务器上
cd /opt/cloud-mcp
./deploy-gitea.sh
```
### 查看状态
```bash
./deploy-gitea.sh --status
```
### 查看日志
```bash
./deploy-gitea.sh --logs
# 或
docker logs -f cloud-mcp
```
## 使用 Makefile可选
```bash
make deploy # 完整部署
make status # 查看状态
make logs # 查看日志
make restart # 重启
make down # 停止
make up # 启动
```
## 更新流程
1. **本地开发** → 提交代码 → 推送到 Gitea
2. **自动触发** → Webhook/Hook 自动运行 `deploy-gitea.sh`
3. **完成** → 容器自动更新并重启
## 故障排查
```bash
# 检查容器状态
docker ps -a | grep cloud-mcp
# 查看详细日志
docker logs cloud-mcp
# 检查部署日志
tail -f deploy.log
# 手动重启
./deploy-gitea.sh --restart
```

View File

@@ -67,6 +67,14 @@ ROUTER_HOST=your-router-ip
ROUTER_USERNAME=admin
ROUTER_PASSWORD=your-password
# Email Configuration (SMTP)
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
EMAIL_USER=your-email@gmail.com
EMAIL_PASSWORD=your-app-password
EMAIL_FROM=Your Name
EMAIL_SECURE=false
# API Keys (optional)
FOOTBALL_API_KEY=your-football-api-key
GAME_API_KEY=your-game-api-key
@@ -119,6 +127,14 @@ Example configuration for Claude Desktop (`claude_desktop_config.json`):
- `docs_bun` - Get Bun documentation
- `code_review` - Review code
- `code_optimize` - Get optimization suggestions
- `git_status` - Get git repository status
- `git_add` - Stage files for commit
- `git_commit` - Commit staged changes
- `git_push` - Push commits to remote
- `git_pull` - Pull latest changes from remote
- `git_log` - Show commit history
- `git_branch` - List, create, or delete branches
- `git_diff` - Show changes between commits or working directory
### DevOps
- `nas_list_files` - List NAS files
@@ -156,16 +172,39 @@ Example configuration for Claude Desktop (`claude_desktop_config.json`):
- `task_add` - Add a task
- `task_list` - List tasks
- `task_complete` - Complete a task
- `email_send` - Send email via SMTP (supports text, HTML, and attachments)
## Data Storage
All data is stored locally in JSON files in the `data/` directory:
- `codeSnippets.json` - Code snippets
- `notes.json` - Personal notes
- `tasks.json` - Tasks
- `babyMilestones.json` - Baby milestones
- `mathResources.json` - Math resources
- `gameWishlist.json` - Game wishlist
All data is stored in PostgreSQL database. The application requires a PostgreSQL database connection.
### Database Setup
1. **Configure Database Connection**
Set the `DATABASE_URL` environment variable in your `.env` file:
```
DATABASE_URL=postgresql://user:password@host:port/database
```
2. **Initialize Database Schema**
Run the initialization script to create all required tables:
```bash
bun run init-db
```
This will create the following tables:
- `code_snippets` - Code snippets
- `notes` - Personal notes
- `tasks` - Tasks
- `baby_milestones` - Baby milestones
- `math_resources` - Math resources
- `game_wishlist` - Game wishlist
### Database Schema
The database schema is defined in `src/storage/schema.sql`. All tables include appropriate indexes for optimal query performance.
## Development

53
README_DEPLOY.md Normal file
View File

@@ -0,0 +1,53 @@
# 部署方案对比
## 方案一:直接运行(推荐 ⭐)
**优点**
- ✅ 最简单,无需 Docker
- ✅ MCP 工具可以直接操作服务器
- ✅ 无需 SSH 配置
- ✅ 资源占用更少
**适用场景**:个人项目、小规模部署
**快速开始**
```bash
# 在服务器上
git clone <repo> /opt/cloud-mcp
cd /opt/cloud-mcp
bun install
bun run start
```
详细文档:查看 `DIRECT_DEPLOY.md`
## 方案二Docker 部署
**优点**
- ✅ 环境隔离
- ✅ 易于管理
- ✅ 适合生产环境
**适用场景**:生产环境、需要隔离的场景
详细文档:查看 `DEPLOY.md`
## 推荐方案
对于你的使用场景(个人 MCP 服务器),**推荐使用方案一(直接运行)**
1. 在服务器上直接运行 MCP
2. 通过 Cursor 的 MCP 连接直接访问
3. 使用 `deploy_update` 工具一键部署更新
这样就不需要:
- ❌ Gitea Actions Runner
- ❌ SSH 配置
- ❌ Webhook 服务器
- ❌ Docker 容器
只需要:
- ✅ 在服务器上运行 MCP
- ✅ 在 Cursor 中配置 MCP 连接
- ✅ 使用部署工具更新

178
RUNNER_SETUP.md Normal file
View File

@@ -0,0 +1,178 @@
# Gitea Act Runner 快速设置指南
## 问题:没有匹配 ubuntu-latest 标签的在线运行器
这个错误表示工作流找不到匹配标签的 Runner。有两种解决方案:
## 解决方案一:使用 self-hosted 标签(推荐,最简单)
我已经将工作流文件更新为使用 `runs-on: self-hosted`,这样会匹配任何自托管的 Runner不需要特定标签。
### 注册 Runner使用 self-hosted
```bash
# 1. 下载 Act Runner
cd /tmp
wget https://gitea.com/gitea/act_runner/releases/download/v0.2.6/act_runner-linux-amd64
chmod +x act_runner-linux-amd64
sudo mv act_runner-linux-amd64 /usr/local/bin/act_runner
# 2. 注册 Runner不指定标签或使用 self-hosted
act_runner register \
--instance <your-gitea-url> \
--token <runner-token> \
--name my-runner
# 或者明确指定 self-hosted 标签
act_runner register \
--instance <your-gitea-url> \
--token <runner-token> \
--name my-runner \
--labels self-hosted
```
**获取 Runner Token**
1. 进入 Gitea**Site Administration** → **Actions****Runners**
2. 点击 **New Runner**
3. 复制显示的 Token
### 运行 Runner
```bash
# 直接运行(测试用)
act_runner daemon
# 或作为系统服务运行(推荐)
sudo tee /etc/systemd/system/gitea-act-runner.service > /dev/null <<EOF
[Unit]
Description=Gitea Act Runner
After=network.target
[Service]
Type=simple
User=root
WorkingDirectory=/opt
ExecStart=/usr/local/bin/act_runner daemon
Restart=always
RestartSec=5
[Install]
WantedBy=multi-user.target
EOF
sudo systemctl daemon-reload
sudo systemctl enable gitea-act-runner
sudo systemctl start gitea-act-runner
sudo systemctl status gitea-act-runner
```
## 解决方案二:注册带有 ubuntu-latest 标签的 Runner
如果你想使用 `ubuntu-latest` 标签,注册时需要指定:
```bash
act_runner register \
--instance <your-gitea-url> \
--token <runner-token> \
--name my-runner \
--labels ubuntu-latest:docker://node:20-bullseye
```
## 验证 Runner 状态
### 1. 检查 Runner 是否运行
```bash
# 检查进程
ps aux | grep act_runner
# 检查服务状态
sudo systemctl status gitea-act-runner
# 查看日志
sudo journalctl -u gitea-act-runner -f
```
### 2. 在 Gitea 中查看 Runner
1. 进入 **Site Administration****Actions****Runners**
2. 应该能看到你的 Runner 显示为 **在线** 状态
3. 检查 Runner 的标签是否包含 `self-hosted``ubuntu-latest`
### 3. 测试工作流
1. 在 Gitea 仓库中进入 **Actions** 标签页
2. 选择 **Deploy to Server** 工作流
3. 点击 **Run workflow**
4. 应该不再显示"没有匹配的 Runner"错误
## 常见问题
### Runner 注册失败
**错误:连接被拒绝**
- 检查 Gitea URL 是否正确
- 确保 Gitea Actions 已启用
- 检查网络连接
**错误Token 无效**
- 确保从 Gitea 正确复制了 Token
- Token 有时效性,如果过期需要重新生成
### Runner 显示离线
1. **检查 Runner 进程**
```bash
ps aux | grep act_runner
```
2. **重启 Runner**
```bash
sudo systemctl restart gitea-act-runner
```
3. **查看日志**
```bash
sudo journalctl -u gitea-act-runner -n 50
```
### 工作流仍然找不到 Runner
1. **检查标签匹配**
- 工作流使用 `runs-on: self-hosted`
- Runner 必须注册时包含 `self-hosted` 标签,或者不指定标签(默认匹配)
2. **重新注册 Runner**
```bash
# 停止 Runner
sudo systemctl stop gitea-act-runner
# 删除旧配置(通常在 ~/.config/act_runner/
rm -rf ~/.config/act_runner
# 重新注册
act_runner register --instance <url> --token <token> --name my-runner --labels self-hosted
# 启动 Runner
sudo systemctl start gitea-act-runner
```
## 快速检查清单
- [ ] Act Runner 已下载并安装
- [ ] Runner 已成功注册
- [ ] Runner 正在运行(`ps aux | grep act_runner`
- [ ] 在 Gitea 中能看到 Runner 显示为在线
- [ ] Runner 标签包含 `self-hosted`(或不指定标签)
- [ ] 工作流文件使用 `runs-on: self-hosted`
- [ ] Gitea Secrets 已配置SERVER_HOST, SERVER_USERNAME, SERVER_SSH_KEY
## 下一步
完成 Runner 设置后:
1. 推送代码触发工作流,或
2. 在 Gitea Actions 页面手动触发工作流
3. 查看工作流执行日志
如果还有问题,查看 Gitea 和 Runner 的日志获取详细错误信息。

166
bun.lock
View File

@@ -7,22 +7,166 @@
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.4",
"axios": "^1.7.7",
"nodemailer": "^6.9.8",
"ssh2": "^1.15.0",
},
"devDependencies": {
"@types/node": "^22.7.9",
"@types/nodemailer": "^6.4.14",
"@types/ssh2": "^1.15.4",
"typescript": "^5.6.3",
},
},
},
"packages": {
"@aws-crypto/sha256-browser": ["@aws-crypto/sha256-browser@5.2.0", "", { "dependencies": { "@aws-crypto/sha256-js": "^5.2.0", "@aws-crypto/supports-web-crypto": "^5.2.0", "@aws-crypto/util": "^5.2.0", "@aws-sdk/types": "^3.222.0", "@aws-sdk/util-locate-window": "^3.0.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw=="],
"@aws-crypto/sha256-js": ["@aws-crypto/sha256-js@5.2.0", "", { "dependencies": { "@aws-crypto/util": "^5.2.0", "@aws-sdk/types": "^3.222.0", "tslib": "^2.6.2" } }, "sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA=="],
"@aws-crypto/supports-web-crypto": ["@aws-crypto/supports-web-crypto@5.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg=="],
"@aws-crypto/util": ["@aws-crypto/util@5.2.0", "", { "dependencies": { "@aws-sdk/types": "^3.222.0", "@smithy/util-utf8": "^2.0.0", "tslib": "^2.6.2" } }, "sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ=="],
"@aws-sdk/client-ses": ["@aws-sdk/client-ses@3.964.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.964.0", "@aws-sdk/credential-provider-node": "3.964.0", "@aws-sdk/middleware-host-header": "3.957.0", "@aws-sdk/middleware-logger": "3.957.0", "@aws-sdk/middleware-recursion-detection": "3.957.0", "@aws-sdk/middleware-user-agent": "3.964.0", "@aws-sdk/region-config-resolver": "3.957.0", "@aws-sdk/types": "3.957.0", "@aws-sdk/util-endpoints": "3.957.0", "@aws-sdk/util-user-agent-browser": "3.957.0", "@aws-sdk/util-user-agent-node": "3.964.0", "@smithy/config-resolver": "^4.4.5", "@smithy/core": "^3.20.0", "@smithy/fetch-http-handler": "^5.3.8", "@smithy/hash-node": "^4.2.7", "@smithy/invalid-dependency": "^4.2.7", "@smithy/middleware-content-length": "^4.2.7", "@smithy/middleware-endpoint": "^4.4.1", "@smithy/middleware-retry": "^4.4.17", "@smithy/middleware-serde": "^4.2.8", "@smithy/middleware-stack": "^4.2.7", "@smithy/node-config-provider": "^4.3.7", "@smithy/node-http-handler": "^4.4.7", "@smithy/protocol-http": "^5.3.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.16", "@smithy/util-defaults-mode-node": "^4.2.19", "@smithy/util-endpoints": "^3.2.7", "@smithy/util-middleware": "^4.2.7", "@smithy/util-retry": "^4.2.7", "@smithy/util-utf8": "^4.2.0", "@smithy/util-waiter": "^4.2.7", "tslib": "^2.6.2" } }, "sha512-gAbDlXvNP5Sb2tS4tJYmOS6/frmba2tryJ4MzJVeR1ad8sSa94GQx7XbR7HyCi5VtJpOSk7Uibp3aKzK3+sWsg=="],
"@aws-sdk/client-sso": ["@aws-sdk/client-sso@3.964.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.964.0", "@aws-sdk/middleware-host-header": "3.957.0", "@aws-sdk/middleware-logger": "3.957.0", "@aws-sdk/middleware-recursion-detection": "3.957.0", "@aws-sdk/middleware-user-agent": "3.964.0", "@aws-sdk/region-config-resolver": "3.957.0", "@aws-sdk/types": "3.957.0", "@aws-sdk/util-endpoints": "3.957.0", "@aws-sdk/util-user-agent-browser": "3.957.0", "@aws-sdk/util-user-agent-node": "3.964.0", "@smithy/config-resolver": "^4.4.5", "@smithy/core": "^3.20.0", "@smithy/fetch-http-handler": "^5.3.8", "@smithy/hash-node": "^4.2.7", "@smithy/invalid-dependency": "^4.2.7", "@smithy/middleware-content-length": "^4.2.7", "@smithy/middleware-endpoint": "^4.4.1", "@smithy/middleware-retry": "^4.4.17", "@smithy/middleware-serde": "^4.2.8", "@smithy/middleware-stack": "^4.2.7", "@smithy/node-config-provider": "^4.3.7", "@smithy/node-http-handler": "^4.4.7", "@smithy/protocol-http": "^5.3.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.16", "@smithy/util-defaults-mode-node": "^4.2.19", "@smithy/util-endpoints": "^3.2.7", "@smithy/util-middleware": "^4.2.7", "@smithy/util-retry": "^4.2.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-IenVyY8Io2CwBgmS22xk/H5LibmSbvLnPA9oFqLORO6Ji1Ks8z/ow+ud/ZurVjFekz3LD/uxVFX3ZKGo6N7Byw=="],
"@aws-sdk/core": ["@aws-sdk/core@3.964.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@aws-sdk/xml-builder": "3.957.0", "@smithy/core": "^3.20.0", "@smithy/node-config-provider": "^4.3.7", "@smithy/property-provider": "^4.2.7", "@smithy/protocol-http": "^5.3.7", "@smithy/signature-v4": "^5.3.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-middleware": "^4.2.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-1gIfbt0KRxI8am1UYFcIxQ5QKb22JyN3k52sxyrKXJYC8Knn/rTUAZbYti45CfETe5PLadInGvWqClwGRlZKNg=="],
"@aws-sdk/credential-provider-env": ["@aws-sdk/credential-provider-env@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-jWNSXOOBMYuxzI2rXi8x91YL07dhomyGzzh0CdaLej0LRmknmDrZcZNkVpa7Fredy1PFcmOlokwCS5PmZMN8ZQ=="],
"@aws-sdk/credential-provider-http": ["@aws-sdk/credential-provider-http@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/fetch-http-handler": "^5.3.8", "@smithy/node-http-handler": "^4.4.7", "@smithy/property-provider": "^4.2.7", "@smithy/protocol-http": "^5.3.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" } }, "sha512-up7dl6vcaoXuYSwGXDvx8RnF8Lwj3jGChhyUR7krZOXLarIfUUN3ILOZnVNK5s/HnVNkEILlkdPvjhr9LVC1/Q=="],
"@aws-sdk/credential-provider-ini": ["@aws-sdk/credential-provider-ini@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/credential-provider-env": "3.964.0", "@aws-sdk/credential-provider-http": "3.964.0", "@aws-sdk/credential-provider-login": "3.964.0", "@aws-sdk/credential-provider-process": "3.964.0", "@aws-sdk/credential-provider-sso": "3.964.0", "@aws-sdk/credential-provider-web-identity": "3.964.0", "@aws-sdk/nested-clients": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/credential-provider-imds": "^4.2.7", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-t4FN9qTWU4nXDU6EQ6jopvyhXw0dbQ3n+3g6x5hmc1ECFAqA+xmFd1i5LljdZCi79cUXHduQWwvW8RJHMf0qJw=="],
"@aws-sdk/credential-provider-login": ["@aws-sdk/credential-provider-login@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/nested-clients": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/protocol-http": "^5.3.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-c64dmTizMkJXDRzN3NYPTmUpKxegr5lmLOYPeQ60Zcbft6HFwPme8Gwy8pNxO4gG1fw6Ja2Vu6fZuSTn8aDFOQ=="],
"@aws-sdk/credential-provider-node": ["@aws-sdk/credential-provider-node@3.964.0", "", { "dependencies": { "@aws-sdk/credential-provider-env": "3.964.0", "@aws-sdk/credential-provider-http": "3.964.0", "@aws-sdk/credential-provider-ini": "3.964.0", "@aws-sdk/credential-provider-process": "3.964.0", "@aws-sdk/credential-provider-sso": "3.964.0", "@aws-sdk/credential-provider-web-identity": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/credential-provider-imds": "^4.2.7", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-FHxDXPOj888/qc/X8s0x4aUBdp4Y3k9VePRehUJBWRhhTsAyuIJis5V0iQeY1qvtqHXYa2qd1EZHGJ3bTjHxSw=="],
"@aws-sdk/credential-provider-process": ["@aws-sdk/credential-provider-process@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-HaTLKqj3jeZY88E/iBjsNJsXgmRTTT7TghqeRiF8FKb/7UY1xEvasBO0c1xqfOye8dsyt35nTfTTyIsd/CBfww=="],
"@aws-sdk/credential-provider-sso": ["@aws-sdk/credential-provider-sso@3.964.0", "", { "dependencies": { "@aws-sdk/client-sso": "3.964.0", "@aws-sdk/core": "3.964.0", "@aws-sdk/token-providers": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-oR78TjSpjVf1IpPWQnGHEGqlnQs+K4f5nCxLK2P6JDPprXay6oknsoSiU4x2urav6VCyMPMC9KTCGjBoFKUIxQ=="],
"@aws-sdk/credential-provider-web-identity": ["@aws-sdk/credential-provider-web-identity@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/nested-clients": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-07JQDmbjZjOt3nL/j1wTcvQqjmPkynQYftUV/ooZ+qTbmJXFbCBdal1VCElyeiu0AgBq9dfhw0rBBcbND1ZMlA=="],
"@aws-sdk/middleware-host-header": ["@aws-sdk/middleware-host-header@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-BBgKawVyfQZglEkNTuBBdC3azlyqNXsvvN4jPkWAiNYcY0x1BasaJFl+7u/HisfULstryweJq/dAvIZIxzlZaA=="],
"@aws-sdk/middleware-logger": ["@aws-sdk/middleware-logger@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-w1qfKrSKHf9b5a8O76yQ1t69u6NWuBjr5kBX+jRWFx/5mu6RLpqERXRpVJxfosbep7k3B+DSB5tZMZ82GKcJtQ=="],
"@aws-sdk/middleware-recursion-detection": ["@aws-sdk/middleware-recursion-detection@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@aws/lambda-invoke-store": "^0.2.2", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-D2H/WoxhAZNYX+IjkKTdOhOkWQaK0jjJrDBj56hKjU5c9ltQiaX/1PqJ4dfjHntEshJfu0w+E6XJ+/6A6ILBBA=="],
"@aws-sdk/middleware-user-agent": ["@aws-sdk/middleware-user-agent@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/types": "3.957.0", "@aws-sdk/util-endpoints": "3.957.0", "@smithy/core": "^3.20.0", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-/QyBl8WLNtqw3ucyAggumQXVCi8GRxaDGE1ElyYMmacfiwHl37S9y8JVW/QLL1lIEXGcsrhMUKV3pyFJFALA7w=="],
"@aws-sdk/nested-clients": ["@aws-sdk/nested-clients@3.964.0", "", { "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", "@aws-sdk/core": "3.964.0", "@aws-sdk/middleware-host-header": "3.957.0", "@aws-sdk/middleware-logger": "3.957.0", "@aws-sdk/middleware-recursion-detection": "3.957.0", "@aws-sdk/middleware-user-agent": "3.964.0", "@aws-sdk/region-config-resolver": "3.957.0", "@aws-sdk/types": "3.957.0", "@aws-sdk/util-endpoints": "3.957.0", "@aws-sdk/util-user-agent-browser": "3.957.0", "@aws-sdk/util-user-agent-node": "3.964.0", "@smithy/config-resolver": "^4.4.5", "@smithy/core": "^3.20.0", "@smithy/fetch-http-handler": "^5.3.8", "@smithy/hash-node": "^4.2.7", "@smithy/invalid-dependency": "^4.2.7", "@smithy/middleware-content-length": "^4.2.7", "@smithy/middleware-endpoint": "^4.4.1", "@smithy/middleware-retry": "^4.4.17", "@smithy/middleware-serde": "^4.2.8", "@smithy/middleware-stack": "^4.2.7", "@smithy/node-config-provider": "^4.3.7", "@smithy/node-http-handler": "^4.4.7", "@smithy/protocol-http": "^5.3.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", "@smithy/util-defaults-mode-browser": "^4.3.16", "@smithy/util-defaults-mode-node": "^4.2.19", "@smithy/util-endpoints": "^3.2.7", "@smithy/util-middleware": "^4.2.7", "@smithy/util-retry": "^4.2.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-ql+ftRwjyZkZeG3qbrRJFVmNR0id83WEUqhFVjvrQMWspNApBhz0Ar4YVSn7Uv0QaKkaR7ALPtmdMzFr3/E4bQ=="],
"@aws-sdk/region-config-resolver": ["@aws-sdk/region-config-resolver@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/config-resolver": "^4.4.5", "@smithy/node-config-provider": "^4.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-V8iY3blh8l2iaOqXWW88HbkY5jDoWjH56jonprG/cpyqqCnprvpMUZWPWYJoI8rHRf2bqzZeql1slxG6EnKI7A=="],
"@aws-sdk/token-providers": ["@aws-sdk/token-providers@3.964.0", "", { "dependencies": { "@aws-sdk/core": "3.964.0", "@aws-sdk/nested-clients": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-UqouLQbYepZnMFJGB/DVpA5GhF9uT98vNWSMz9PVbhgEPUKa73FECRT6YFZvZOh8kA+0JiENrnmS6d93I70ykQ=="],
"@aws-sdk/types": ["@aws-sdk/types@3.957.0", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-wzWC2Nrt859ABk6UCAVY/WYEbAd7FjkdrQL6m24+tfmWYDNRByTJ9uOgU/kw9zqLCAwb//CPvrJdhqjTznWXAg=="],
"@aws-sdk/util-endpoints": ["@aws-sdk/util-endpoints@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "@smithy/util-endpoints": "^3.2.7", "tslib": "^2.6.2" } }, "sha512-xwF9K24mZSxcxKS3UKQFeX/dPYkEps9wF1b+MGON7EvnbcucrJGyQyK1v1xFPn1aqXkBTFi+SZaMRx5E5YCVFw=="],
"@aws-sdk/util-locate-window": ["@aws-sdk/util-locate-window@3.957.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-nhmgKHnNV9K+i9daumaIz8JTLsIIML9PE/HUks5liyrjUzenjW/aHoc7WJ9/Td/gPZtayxFnXQSJRb/fDlBuJw=="],
"@aws-sdk/util-user-agent-browser": ["@aws-sdk/util-user-agent-browser@3.957.0", "", { "dependencies": { "@aws-sdk/types": "3.957.0", "@smithy/types": "^4.11.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "sha512-exueuwxef0lUJRnGaVkNSC674eAiWU07ORhxBnevFFZEKisln+09Qrtw823iyv5I1N8T+wKfh95xvtWQrNKNQw=="],
"@aws-sdk/util-user-agent-node": ["@aws-sdk/util-user-agent-node@3.964.0", "", { "dependencies": { "@aws-sdk/middleware-user-agent": "3.964.0", "@aws-sdk/types": "3.957.0", "@smithy/node-config-provider": "^4.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "peerDependencies": { "aws-crt": ">=1.0.0" }, "optionalPeers": ["aws-crt"] }, "sha512-jgob8Z/bZIh1dwEgLqE12q+aCf0ieLy7anT8bWpqMijMJqsnrPBToa7smSykfom9YHrdOgrQhXswMpE75dzLRw=="],
"@aws-sdk/xml-builder": ["@aws-sdk/xml-builder@3.957.0", "", { "dependencies": { "@smithy/types": "^4.11.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" } }, "sha512-Ai5iiQqS8kJ5PjzMhWcLKN0G2yasAkvpnPlq2EnqlIMdB48HsizElt62qcktdxp4neRMyGkFq4NzgmDbXnhRiA=="],
"@aws/lambda-invoke-store": ["@aws/lambda-invoke-store@0.2.2", "", {}, "sha512-C0NBLsIqzDIae8HFw9YIrIBsbc0xTiOtt7fAukGPnqQ/+zZNaq+4jhuccltK0QuWHBnNm/a6kLIRA6GFiM10eg=="],
"@hono/node-server": ["@hono/node-server@1.19.7", "", { "peerDependencies": { "hono": "^4" } }, "sha512-vUcD0uauS7EU2caukW8z5lJKtoGMokxNbJtBiwHgpqxEXokaHCBkQUmCHhjFB1VUTWdqj25QoMkMKzgjq+uhrw=="],
"@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.25.1", "", { "dependencies": { "@hono/node-server": "^1.19.7", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.0.1", "express-rate-limit": "^7.5.0", "jose": "^6.1.1", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.0" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1" }, "optionalPeers": ["@cfworker/json-schema"] }, "sha512-yO28oVFFC7EBoiKdAn+VqRm+plcfv4v0xp6osG/VsCB0NlPZWi87ajbCZZ8f/RvOFLEu7//rSRmuZZ7lMoe3gQ=="],
"@smithy/abort-controller": ["@smithy/abort-controller@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw=="],
"@smithy/config-resolver": ["@smithy/config-resolver@4.4.5", "", { "dependencies": { "@smithy/node-config-provider": "^4.3.7", "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", "@smithy/util-endpoints": "^3.2.7", "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" } }, "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg=="],
"@smithy/core": ["@smithy/core@3.20.0", "", { "dependencies": { "@smithy/middleware-serde": "^4.2.8", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-middleware": "^4.2.7", "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" } }, "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ=="],
"@smithy/credential-provider-imds": ["@smithy/credential-provider-imds@4.2.7", "", { "dependencies": { "@smithy/node-config-provider": "^4.3.7", "@smithy/property-provider": "^4.2.7", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" } }, "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA=="],
"@smithy/fetch-http-handler": ["@smithy/fetch-http-handler@5.3.8", "", { "dependencies": { "@smithy/protocol-http": "^5.3.7", "@smithy/querystring-builder": "^4.2.7", "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" } }, "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg=="],
"@smithy/hash-node": ["@smithy/hash-node@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw=="],
"@smithy/invalid-dependency": ["@smithy/invalid-dependency@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ=="],
"@smithy/is-array-buffer": ["@smithy/is-array-buffer@4.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-DZZZBvC7sjcYh4MazJSGiWMI2L7E0oCiRHREDzIxi/M2LY79/21iXt6aPLHge82wi5LsuRF5A06Ds3+0mlh6CQ=="],
"@smithy/middleware-content-length": ["@smithy/middleware-content-length@4.2.7", "", { "dependencies": { "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg=="],
"@smithy/middleware-endpoint": ["@smithy/middleware-endpoint@4.4.1", "", { "dependencies": { "@smithy/core": "^3.20.0", "@smithy/middleware-serde": "^4.2.8", "@smithy/node-config-provider": "^4.3.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "@smithy/url-parser": "^4.2.7", "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" } }, "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg=="],
"@smithy/middleware-retry": ["@smithy/middleware-retry@4.4.17", "", { "dependencies": { "@smithy/node-config-provider": "^4.3.7", "@smithy/protocol-http": "^5.3.7", "@smithy/service-error-classification": "^4.2.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "@smithy/util-middleware": "^4.2.7", "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" } }, "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg=="],
"@smithy/middleware-serde": ["@smithy/middleware-serde@4.2.8", "", { "dependencies": { "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w=="],
"@smithy/middleware-stack": ["@smithy/middleware-stack@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw=="],
"@smithy/node-config-provider": ["@smithy/node-config-provider@4.3.7", "", { "dependencies": { "@smithy/property-provider": "^4.2.7", "@smithy/shared-ini-file-loader": "^4.4.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw=="],
"@smithy/node-http-handler": ["@smithy/node-http-handler@4.4.7", "", { "dependencies": { "@smithy/abort-controller": "^4.2.7", "@smithy/protocol-http": "^5.3.7", "@smithy/querystring-builder": "^4.2.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ=="],
"@smithy/property-provider": ["@smithy/property-provider@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA=="],
"@smithy/protocol-http": ["@smithy/protocol-http@5.3.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA=="],
"@smithy/querystring-builder": ["@smithy/querystring-builder@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg=="],
"@smithy/querystring-parser": ["@smithy/querystring-parser@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w=="],
"@smithy/service-error-classification": ["@smithy/service-error-classification@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0" } }, "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA=="],
"@smithy/shared-ini-file-loader": ["@smithy/shared-ini-file-loader@4.4.2", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg=="],
"@smithy/signature-v4": ["@smithy/signature-v4@5.3.7", "", { "dependencies": { "@smithy/is-array-buffer": "^4.2.0", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg=="],
"@smithy/smithy-client": ["@smithy/smithy-client@4.10.2", "", { "dependencies": { "@smithy/core": "^3.20.0", "@smithy/middleware-endpoint": "^4.4.1", "@smithy/middleware-stack": "^4.2.7", "@smithy/protocol-http": "^5.3.7", "@smithy/types": "^4.11.0", "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" } }, "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g=="],
"@smithy/types": ["@smithy/types@4.11.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA=="],
"@smithy/url-parser": ["@smithy/url-parser@4.2.7", "", { "dependencies": { "@smithy/querystring-parser": "^4.2.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg=="],
"@smithy/util-base64": ["@smithy/util-base64@4.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-GkXZ59JfyxsIwNTWFnjmFEI8kZpRNIBfxKjv09+nkAWPt/4aGaEWMM04m4sxgNVWkbt2MdSvE3KF/PfX4nFedQ=="],
"@smithy/util-body-length-browser": ["@smithy/util-body-length-browser@4.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-Fkoh/I76szMKJnBXWPdFkQJl2r9SjPt3cMzLdOB6eJ4Pnpas8hVoWPYemX/peO0yrrvldgCUVJqOAjUrOLjbxg=="],
"@smithy/util-body-length-node": ["@smithy/util-body-length-node@4.2.1", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-h53dz/pISVrVrfxV1iqXlx5pRg3V2YWFcSQyPyXZRrZoZj4R4DeWRDo1a7dd3CPTcFi3kE+98tuNyD2axyZReA=="],
"@smithy/util-buffer-from": ["@smithy/util-buffer-from@4.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-kAY9hTKulTNevM2nlRtxAG2FQ3B2OR6QIrPY3zE5LqJy1oxzmgBGsHLWTcNhWXKchgA0WHW+mZkQrng/pgcCew=="],
"@smithy/util-config-provider": ["@smithy/util-config-provider@4.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-YEjpl6XJ36FTKmD+kRJJWYvrHeUvm5ykaUS5xK+6oXffQPHeEM4/nXlZPe+Wu0lsgRUcNZiliYNh/y7q9c2y6Q=="],
"@smithy/util-defaults-mode-browser": ["@smithy/util-defaults-mode-browser@4.3.16", "", { "dependencies": { "@smithy/property-provider": "^4.2.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ=="],
"@smithy/util-defaults-mode-node": ["@smithy/util-defaults-mode-node@4.2.19", "", { "dependencies": { "@smithy/config-resolver": "^4.4.5", "@smithy/credential-provider-imds": "^4.2.7", "@smithy/node-config-provider": "^4.3.7", "@smithy/property-provider": "^4.2.7", "@smithy/smithy-client": "^4.10.2", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA=="],
"@smithy/util-endpoints": ["@smithy/util-endpoints@3.2.7", "", { "dependencies": { "@smithy/node-config-provider": "^4.3.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg=="],
"@smithy/util-hex-encoding": ["@smithy/util-hex-encoding@4.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-CCQBwJIvXMLKxVbO88IukazJD9a4kQ9ZN7/UMGBjBcJYvatpWk+9g870El4cB8/EJxfe+k+y0GmR9CAzkF+Nbw=="],
"@smithy/util-middleware": ["@smithy/util-middleware@4.2.7", "", { "dependencies": { "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w=="],
"@smithy/util-retry": ["@smithy/util-retry@4.2.7", "", { "dependencies": { "@smithy/service-error-classification": "^4.2.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg=="],
"@smithy/util-stream": ["@smithy/util-stream@4.5.8", "", { "dependencies": { "@smithy/fetch-http-handler": "^5.3.8", "@smithy/node-http-handler": "^4.4.7", "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w=="],
"@smithy/util-uri-escape": ["@smithy/util-uri-escape@4.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-igZpCKV9+E/Mzrpq6YacdTQ0qTiLm85gD6N/IrmyDvQFA4UnU3d5g3m8tMT/6zG/vVkWSU+VxeUyGonL62DuxA=="],
"@smithy/util-utf8": ["@smithy/util-utf8@4.2.0", "", { "dependencies": { "@smithy/util-buffer-from": "^4.2.0", "tslib": "^2.6.2" } }, "sha512-zBPfuzoI8xyBtR2P6WQj63Rz8i3AmfAaJLuNG8dWsfvPe8lO4aCPYLn879mEgHndZH1zQ2oXmG8O1GGzzaoZiw=="],
"@smithy/util-waiter": ["@smithy/util-waiter@4.2.7", "", { "dependencies": { "@smithy/abort-controller": "^4.2.7", "@smithy/types": "^4.11.0", "tslib": "^2.6.2" } }, "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw=="],
"@smithy/uuid": ["@smithy/uuid@1.1.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-4aUIteuyxtBUhVdiQqcDhKFitwfd9hqoSDYY2KRXiWtgoWJ9Bmise+KfEPDiVHWeJepvF8xJO9/9+WDIciMFFw=="],
"@types/node": ["@types/node@22.19.3", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA=="],
"@types/nodemailer": ["@types/nodemailer@6.4.21", "", { "dependencies": { "@aws-sdk/client-ses": "^3.731.1", "@types/node": "*" } }, "sha512-Eix+sb/Nj28MNnWvO2X1OLrk5vuD4C9SMnb2Vf4itWnxphYeSceqkFX7IdmxTzn+dvmnNz7paMbg4Uc60wSfJg=="],
"@types/ssh2": ["@types/ssh2@1.15.5", "", { "dependencies": { "@types/node": "^18.11.18" } }, "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ=="],
"accepts": ["accepts@2.0.0", "", { "dependencies": { "mime-types": "^3.0.0", "negotiator": "^1.0.0" } }, "sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng=="],
@@ -41,6 +185,8 @@
"body-parser": ["body-parser@2.2.1", "", { "dependencies": { "bytes": "^3.1.2", "content-type": "^1.0.5", "debug": "^4.4.3", "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", "qs": "^6.14.0", "raw-body": "^3.0.1", "type-is": "^2.0.1" } }, "sha512-nfDwkulwiZYQIGwxdy0RUmowMhKcFVcYXUU7m4QlKYim1rUtg83xm2yjZ40QjDuc291AJjjeSc9b++AWHSgSHw=="],
"bowser": ["bowser@2.13.1", "", {}, "sha512-OHawaAbjwx6rqICCKgSG0SAnT05bzd7ppyKLVUITZpANBaaMFBAsaNkto3LoQ31tyFP5kNujE8Cdx85G9VzOkw=="],
"buildcheck": ["buildcheck@0.0.7", "", {}, "sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA=="],
"bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
@@ -101,6 +247,8 @@
"fast-uri": ["fast-uri@3.1.0", "", {}, "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA=="],
"fast-xml-parser": ["fast-xml-parser@5.2.5", "", { "dependencies": { "strnum": "^2.1.0" }, "bin": { "fxparser": "src/cli/cli.js" } }, "sha512-pfX9uG9Ki0yekDHx2SiuRIyFdyAr1kMIMitPvb0YBo8SUfKvia7w7FIyd/l6av85pFYRhZscS75MwMnbvY+hcQ=="],
"finalhandler": ["finalhandler@2.1.1", "", { "dependencies": { "debug": "^4.4.0", "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "on-finished": "^2.4.1", "parseurl": "^1.3.3", "statuses": "^2.0.1" } }, "sha512-S8KoZgRZN+a5rNwqTxlZZePjT/4cnm0ROV70LedRHZ0p8u9fRID0hJUZQpkKLzro8LfmC8sx23bY6tVNxv8pQA=="],
"follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="],
@@ -161,6 +309,8 @@
"negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="],
"nodemailer": ["nodemailer@6.10.1", "", {}, "sha512-Z+iLaBGVaSjbIzQ4pX6XV41HrooLsQ10ZWPUehGmuantvzWoDVBnmsdUcOIDM1t+yPor5pDhVlDESgOMEGxhHA=="],
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
"object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="],
@@ -215,8 +365,12 @@
"statuses": ["statuses@2.0.2", "", {}, "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw=="],
"strnum": ["strnum@2.1.2", "", {}, "sha512-l63NF9y/cLROq/yqKXSLtcMeeyOfnSQlfMSlzFt/K73oIaD8DGaQWd7Z34X9GPiKqP5rbSh84Hl4bOlLcjiSrQ=="],
"toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="],
"type-is": ["type-is@2.0.1", "", { "dependencies": { "content-type": "^1.0.5", "media-typer": "^1.1.0", "mime-types": "^3.0.0" } }, "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw=="],
@@ -237,12 +391,24 @@
"zod-to-json-schema": ["zod-to-json-schema@3.25.1", "", { "peerDependencies": { "zod": "^3.25 || ^4" } }, "sha512-pM/SU9d3YAggzi6MtR4h7ruuQlqKtad8e9S0fmxcMi+ueAK5Korys/aWcV9LIIHTVbj01NdzxcnXSN+O74ZIVA=="],
"@aws-crypto/sha256-browser/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
"@aws-crypto/util/@smithy/util-utf8": ["@smithy/util-utf8@2.3.0", "", { "dependencies": { "@smithy/util-buffer-from": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A=="],
"@types/ssh2/@types/node": ["@types/node@18.19.130", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg=="],
"form-data/mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="],
"@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from": ["@smithy/util-buffer-from@2.2.0", "", { "dependencies": { "@smithy/is-array-buffer": "^2.2.0", "tslib": "^2.6.2" } }, "sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA=="],
"@types/ssh2/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"form-data/mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"@aws-crypto/sha256-browser/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="],
"@aws-crypto/util/@smithy/util-utf8/@smithy/util-buffer-from/@smithy/is-array-buffer": ["@smithy/is-array-buffer@2.2.0", "", { "dependencies": { "tslib": "^2.6.2" } }, "sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA=="],
}
}

238
deploy-gitea.sh Executable file
View File

@@ -0,0 +1,238 @@
#!/bin/bash
# Gitea Webhook Deployment Script
# This script is triggered by Gitea webhook on push events
# Configure in Gitea: Repository -> Settings -> Webhooks -> Add Webhook

# Abort on the first failing command.
set -e

# ANSI color codes for log output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
# NOTE(review): BLUE is defined but never referenced below.
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Configuration
PROJECT_DIR="/path/to/cloud-mcp" # Update this to your project path
CONTAINER_NAME="cloud-mcp"
COMPOSE_FILE="docker-compose.yml"
BRANCH="main" # or "master"

# Log file (path depends on PROJECT_DIR being set correctly above).
LOG_FILE="${PROJECT_DIR}/deploy.log"
# Functions
# Print a timestamped log line to stdout and append it to $LOG_FILE.
#
# Usage: log LEVEL MESSAGE...
#   $1   - level label (INFO/WARN/ERROR)
#   rest - message text, joined into a single string
log() {
    local level=$1
    shift
    # "$*" joins the remaining args into one word; "$@" inside an
    # assignment has unspecified joining behavior (shellcheck SC2124).
    local message="$*"
    # Declare and assign separately so the assignment does not mask the
    # exit status of the command substitution (shellcheck SC2155).
    local timestamp
    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
    echo -e "${timestamp} [${level}] ${message}" | tee -a "$LOG_FILE"
}
# Log an informational message in green.
log_info() {
    # "$*" joins all args; "$@" inside a quoted string mixes string and
    # array expansion (shellcheck SC2145).
    log "INFO" "${GREEN}$*${NC}"
}
# Log a warning message in yellow.
log_warn() {
    # "$*" joins all args; "$@" inside a quoted string mixes string and
    # array expansion (shellcheck SC2145).
    log "WARN" "${YELLOW}$*${NC}"
}
# Log an error message in red.
log_error() {
    # "$*" joins all args; "$@" inside a quoted string mixes string and
    # array expansion (shellcheck SC2145).
    log "ERROR" "${RED}$*${NC}"
}
# Ensure the configured project directory holds a compose file, then
# make it the working directory. Exits 1 when the directory is wrong.
check_directory() {
    [ -f "$PROJECT_DIR/docker-compose.yml" ] || {
        log_error "Project directory not found: $PROJECT_DIR"
        exit 1
    }
    cd "$PROJECT_DIR"
    log_info "Working directory: $(pwd)"
}
# Fail fast (exit 1) when the Docker daemon is unreachable.
check_docker() {
    docker info > /dev/null 2>&1 || {
        log_error "Docker is not running"
        exit 1
    }
    log_info "Docker is running"
}
# Pull latest code
#
# Synchronize the local checkout with origin/$BRANCH.
# Returns 0 when new commits were applied (deploy needed),
# 1 when already up to date (caller skips the deploy).
# Exits the script entirely on fetch/pull/reset failure.
pull_latest() {
    log_info "Pulling latest code from repository..."
    # Fetch latest changes
    git fetch origin "$BRANCH" || {
        log_error "Failed to fetch from repository"
        exit 1
    }
    # Check if there are updates:
    # LOCAL = HEAD, REMOTE = tip of origin/$BRANCH, BASE = common ancestor.
    LOCAL=$(git rev-parse @)
    REMOTE=$(git rev-parse "origin/${BRANCH}")
    BASE=$(git merge-base @ "origin/${BRANCH}")
    if [ "$LOCAL" = "$REMOTE" ]; then
        log_info "Already up to date"
        return 1
    elif [ "$LOCAL" = "$BASE" ]; then
        # Fast-forward case: remote has commits we do not.
        log_info "New commits found, pulling..."
        git pull origin "$BRANCH" || {
            log_error "Failed to pull from repository"
            exit 1
        }
        return 0
    else
        # NOTE(review): hard reset discards any local commits that are
        # not on the remote — intentional for a deploy box, but destructive.
        log_warn "Local branch is ahead or diverged. Resetting to remote..."
        git reset --hard "origin/${BRANCH}" || {
            log_error "Failed to reset branch"
            exit 1
        }
        return 0
    fi
}
# Rebuild the Docker image from scratch (no cache). Exits 1 on failure.
build_image() {
    log_info "Building Docker image..."
    if ! docker-compose -f "$COMPOSE_FILE" build --no-cache; then
        log_error "Failed to build Docker image"
        exit 1
    fi
    log_info "Docker image built successfully"
}
# Take the compose stack down; a failure (e.g. nothing running) is only
# a warning, not fatal.
stop_container() {
    log_info "Stopping existing container..."
    if ! docker-compose -f "$COMPOSE_FILE" down; then
        log_warn "Failed to stop container (might not exist)"
    fi
    log_info "Container stopped"
}
# Bring the compose stack up in detached mode. Exits 1 on failure.
start_container() {
    log_info "Starting container..."
    if ! docker-compose -f "$COMPOSE_FILE" up -d; then
        log_error "Failed to start container"
        exit 1
    fi
    log_info "Container started"
}
# Verify deployment
#
# Confirm the container exists and its inspect status is "running".
# Returns 0 on success, 1 on failure; the caller decides how to react.
verify_deployment() {
    log_info "Verifying deployment..."
    # Give the container a moment to come up before inspecting it.
    sleep 3
    if docker ps --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        log_info "Container is running"
        # Check container health
        local status=$(docker inspect --format='{{.State.Status}}' "$CONTAINER_NAME" 2>/dev/null)
        if [ "$status" = "running" ]; then
            log_info "Deployment successful!"
            return 0
        else
            log_error "Container is not running (status: $status)"
            return 1
        fi
    else
        log_error "Container not found"
        return 1
    fi
}
# Print the last 30 log lines from the container; warn if unavailable.
show_logs() {
    log_info "Recent container logs:"
    if ! docker logs --tail 30 "$CONTAINER_NAME" 2>&1; then
        log_warn "Could not fetch logs"
    fi
}
# Prune dangling Docker images left behind by rebuilds; best-effort.
cleanup_images() {
    log_info "Cleaning up unused Docker images..."
    if ! docker image prune -f; then
        log_warn "Failed to cleanup images"
    fi
}
# Main deployment flow
#
# Full pipeline: sanity checks -> pull -> build -> restart -> verify.
# Exits 0 early when the checkout is already up to date.
main() {
    log_info "========================================="
    log_info "Starting deployment process"
    log_info "========================================="
    check_directory
    check_docker
    # Pull latest code; pull_latest returns 1 when there is nothing new.
    if ! pull_latest; then
        log_info "No updates to deploy"
        exit 0
    fi
    # Build and deploy
    build_image
    stop_container
    start_container
    # Verify
    if verify_deployment; then
        show_logs
        cleanup_images
        log_info "========================================="
        log_info "Deployment completed successfully!"
        log_info "========================================="
    else
        log_error "Deployment verification failed"
        show_logs
        exit 1
    fi
}
# Handle script arguments
#
# Subcommands:
#   --pull-only   sync code without redeploying
#   --rebuild     rebuild the image and restart without pulling
#   --status      show container status plus recent logs
#   --logs        follow container logs (blocks)
#   --stop / --start / --restart   container lifecycle controls
#   (no args)     run the full deployment pipeline (main)
case "${1:-}" in
    --pull-only)
        check_directory
        pull_latest
        ;;
    --rebuild)
        check_directory
        check_docker
        build_image
        stop_container
        start_container
        verify_deployment
        ;;
    --status)
        check_directory
        docker ps --filter "name=${CONTAINER_NAME}" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
        show_logs
        ;;
    --logs)
        check_directory
        docker logs -f "$CONTAINER_NAME"
        ;;
    --stop)
        check_directory
        stop_container
        ;;
    --start)
        check_directory
        start_container
        ;;
    --restart)
        check_directory
        stop_container
        start_container
        ;;
    *)
        main
        ;;
esac

118
deploy.sh Executable file
View File

@@ -0,0 +1,118 @@
#!/bin/bash
# Cloud MCP Deployment Script
# This script builds and deploys the MCP server using Docker
# Usage: ./deploy.sh [--pull] [--rebuild]

# Abort on the first failing command.
set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Configuration
CONTAINER_NAME="cloud-mcp"
# NOTE(review): IMAGE_NAME is defined but never referenced below.
IMAGE_NAME="cloud-mcp"
COMPOSE_FILE="docker-compose.yml"
# Functions
# Print a green [INFO]-prefixed message to stdout.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}
# Print a yellow [WARN]-prefixed message to stdout.
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
# Print a red [ERROR]-prefixed message to stdout.
log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}
# Fail fast (exit 1) when the Docker daemon is unreachable.
check_docker() {
    docker info > /dev/null 2>&1 || {
        log_error "Docker is not running. Please start Docker and try again."
        exit 1
    }
    log_info "Docker is running"
}
# Pull latest code from git (if --pull flag is set)
#
# Pull is best-effort: tries origin/main, then origin/master, and only
# warns on failure so deployment continues with the local checkout.
# NOTE: the previous version also tested `"$1" == "--pull"`, which is a
# strict subset of the substring match on "$*" — only one check is needed.
pull_latest() {
    if [[ "$*" == *"--pull"* ]]; then
        log_info "Pulling latest code from git..."
        git pull origin main || git pull origin master || log_warn "Failed to pull from git, continuing with local code"
    fi
}
# Build the Docker image; "--rebuild" anywhere in the args forces a
# clean (no-cache) build.
build_image() {
    log_info "Building Docker image..."
    if [[ "$*" == *"--rebuild"* ]]; then
        log_info "Force rebuilding image (no cache)..."
        docker-compose -f "$COMPOSE_FILE" build --no-cache
    else
        docker-compose -f "$COMPOSE_FILE" build
    fi
    log_info "Docker image built successfully"
}
# Take the compose stack down if the named container exists (running or
# stopped); otherwise just report that nothing was found.
stop_container() {
    if ! docker ps -a --format '{{.Names}}' | grep -q "^${CONTAINER_NAME}$"; then
        log_info "No existing container found"
        return 0
    fi
    log_info "Stopping existing container..."
    docker-compose -f "$COMPOSE_FILE" down
    log_info "Container stopped"
}
# Start container
#
# Bring the compose service up in detached mode; under `set -e` a
# docker-compose failure aborts the whole script.
start_container() {
    log_info "Starting container..."
    docker-compose -f "$COMPOSE_FILE" up -d
    log_info "Container started"
}
# Print the container's status line and its last 20 log lines.
show_status() {
    log_info "Container status:"
    docker ps --filter "name=${CONTAINER_NAME}" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"
    log_info "Container logs (last 20 lines):"
    if ! docker logs --tail 20 "${CONTAINER_NAME}" 2>&1; then
        log_warn "Could not fetch logs"
    fi
}
# Main deployment flow
#
# Forwards all CLI flags ("$@") to pull_latest (--pull) and
# build_image (--rebuild), then restarts the stack and prints status.
main() {
    log_info "Starting deployment..."
    check_docker
    pull_latest "$@"
    build_image "$@"
    stop_container
    start_container
    # Wait a moment for container to start
    sleep 2
    show_status
    log_info "Deployment completed!"
    log_info "To view logs: docker logs -f ${CONTAINER_NAME}"
    log_info "To stop: docker-compose -f ${COMPOSE_FILE} down"
}
# Run main function, forwarding all command-line arguments.
main "$@"

34
docker-compose.yml Normal file
View File

@@ -0,0 +1,34 @@
# NOTE(review): the top-level `version` key is obsolete under Compose v2
# (ignored with a warning) but still required by legacy docker-compose v1,
# which the deploy scripts invoke — keep it until those scripts migrate.
version: "3.8"

services:
  cloud-mcp:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: cloud-mcp
    restart: unless-stopped
    volumes:
      # Mount .env file if exists (optional, can use environment variables instead)
      - ./.env:/app/.env:ro
    environment:
      - NODE_ENV=production
      # PostgreSQL connection (required)
      # - DATABASE_URL=${DATABASE_URL}
      # Add your environment variables here or use .env file
      # - NAS_HOST=${NAS_HOST}
      # - SERVER_HOST=${SERVER_HOST}
      # etc.
    # Note: This service requires an external PostgreSQL database.
    # Set DATABASE_URL environment variable to connect to your PostgreSQL instance.
    # For local development, you can uncomment the postgres service below:
    # depends_on:
    #   - postgres
    # stdin_open/tty keep the container's stdio open (the server speaks
    # over stdio, so the process must not lose its streams).
    stdin_open: true
    tty: true
    # Health check (optional)
    # NOTE(review): `bun --version` only proves the runtime binary works,
    # not that the app itself is healthy — consider an app-level probe.
    healthcheck:
      test: ["CMD", "bun", "--version"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

26
ecosystem.config.js Normal file
View File

@@ -0,0 +1,26 @@
// PM2 ecosystem file for cloud-mcp
// Defines a single PM2-managed process that runs the server via bun.
module.exports = {
  apps: [
    {
      name: "cloud-mcp", // PM2 process name (used by stop/restart scripts)
      script: "bun", // launch through the bun binary
      args: "run src/index.ts", // bun runs the TS entry point directly
      cwd: "/opt/cloud-mcp", // deployment path on the server — adjust per host
      instances: 1, // single instance; no cluster mode
      exec_mode: "fork",
      watch: false, // no file-watching restarts in production
      max_memory_restart: "500M", // restart if RSS exceeds 500 MB
      env: {
        NODE_ENV: "production",
      },
      // Separate stderr/stdout log files, timestamped and merged.
      error_file: "./logs/cloud-mcp-error.log",
      out_file: "./logs/cloud-mcp-out.log",
      log_date_format: "YYYY-MM-DD HH:mm:ss Z",
      merge_logs: true,
      // Crash handling: restart automatically, but give up after 10
      // restarts; a run shorter than 10s counts as an unstable start.
      autorestart: true,
      max_restarts: 10,
      min_uptime: "10s",
    },
  ],
};

View File

@@ -18,6 +18,19 @@ ROUTER_HOST=192.168.1.1
ROUTER_USERNAME=admin
ROUTER_PASSWORD=your-router-password
# Email Configuration
# 邮件配置SMTP
EMAIL_HOST=smtp.gmail.com
EMAIL_PORT=587
EMAIL_USER=your-email@gmail.com
EMAIL_PASSWORD=your-app-password
EMAIL_FROM=Your Name
EMAIL_SECURE=false
# Database Configuration
# PostgreSQL 数据库连接配置
DATABASE_URL=postgresql://user:password@host:port/database
# API Keys (optional)
# 可选:配置 API 密钥以使用完整功能
# 足球信息 API (football-data.org - 免费注册获取)

View File

@@ -7,7 +7,14 @@
"scripts": {
"dev": "bun run src/index.ts",
"build": "bun build src/index.ts --outdir dist --target bun",
"start": "bun run dist/index.js"
"start": "bun run src/index.ts",
"start:pm2": "pm2 start ecosystem.config.js",
"stop:pm2": "pm2 stop cloud-mcp",
"restart:pm2": "pm2 restart cloud-mcp",
"test": "bun test",
"test:watch": "bun test --watch",
"test:coverage": "bun test --coverage",
"init-db": "bun run scripts/init-db.ts"
},
"keywords": [
"mcp",
@@ -19,10 +26,13 @@
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.4",
"axios": "^1.7.7",
"nodemailer": "^6.9.8",
"postgres": "^3.4.3",
"ssh2": "^1.15.0"
},
"devDependencies": {
"@types/node": "^22.7.9",
"@types/nodemailer": "^6.4.14",
"@types/ssh2": "^1.15.4",
"typescript": "^5.6.3"
},

258
scripts/init-db.ts Normal file
View File

@@ -0,0 +1,258 @@
/**
* Database initialization script
* Creates database and all required tables in PostgreSQL
*/
import postgres from "postgres";
import { readFileSync } from "fs";
import { join } from "path";
/**
 * Parse a PostgreSQL connection URL into its components.
 *
 * Accepts both the canonical `postgresql://` scheme and the common
 * `postgres://` alias; the returned `protocol` is always normalized to
 * "postgresql". Components are returned exactly as written in the URL
 * (no percent-decoding), so they can be reassembled into a valid
 * connection string by callers.
 *
 * @param url Connection string of the form
 *   `postgresql://user:password@host:port/database`
 * @returns The parsed components (all as strings).
 * @throws Error when the URL does not match the expected shape.
 */
function parseDatabaseUrl(url: string): {
  protocol: string;
  user: string;
  password: string;
  host: string;
  port: string;
  database: string;
} {
  // `postgres://` is the widely-used alias for `postgresql://`; accept both.
  const match = url.match(
    /^postgres(?:ql)?:\/\/([^:]+):([^@]+)@([^:]+):(\d+)\/(.+)$/
  );
  if (!match) {
    throw new Error(
      "Invalid DATABASE_URL format. Expected: postgresql://user:password@host:port/database"
    );
  }
  return {
    protocol: "postgresql",
    user: match[1],
    password: match[2],
    host: match[3],
    port: match[4],
    database: match[5],
  };
}
/**
 * Create the target database (if missing) and apply the SQL schema.
 *
 * Flow:
 *  1. Parse DATABASE_URL; connect to the server's default `postgres`
 *     database with the same credentials to CREATE DATABASE if needed.
 *  2. Reconnect to the target database, read src/storage/schema.sql,
 *     strip `--` comments, split it into statements, and execute all
 *     CREATE TABLE statements before all CREATE INDEX statements.
 *
 * NOTE(review): the comment stripping and semicolon splitting are
 * text-based — a `--` or `;` inside a SQL string literal would be
 * mis-handled, and statements other than CREATE TABLE / CREATE INDEX
 * are silently dropped. Fine for the current schema.sql; revisit if
 * the schema grows functions, triggers, or literals containing these
 * characters.
 *
 * Exits the process with code 1 on any failure.
 */
async function initDatabase() {
  const dbUrl = process.env.DATABASE_URL;
  if (!dbUrl) {
    console.error("Error: DATABASE_URL environment variable is required");
    console.error("Please set it in your .env file or export it:");
    console.error(
      " export DATABASE_URL=postgresql://user:password@host:port/database"
    );
    process.exit(1);
  }
  const dbConfig = parseDatabaseUrl(dbUrl);
  const targetDatabase = dbConfig.database;
  console.log(`Target database: ${targetDatabase}`);
  console.log(
    `Connecting to PostgreSQL server (${dbConfig.host}:${dbConfig.port})...`
  );
  // Connect to postgres database (default database) to create target database.
  // Credentials are reused verbatim from the parsed URL.
  const adminUrl = `postgresql://${dbConfig.user}:${dbConfig.password}@${dbConfig.host}:${dbConfig.port}/postgres`;
  const adminSql = postgres(adminUrl);
  try {
    // Check if database exists
    const dbExists = await adminSql`
      SELECT 1 FROM pg_database WHERE datname = ${targetDatabase}
    `;
    if (dbExists.length === 0) {
      console.log(`Database "${targetDatabase}" does not exist. Creating...`);
      // Create database (escape database name to prevent SQL injection)
      // PostgreSQL identifiers are case-insensitive unless quoted, so we quote it
      const escapedDbName = `"${targetDatabase.replace(/"/g, '""')}"`;
      // CREATE DATABASE cannot be parameterized, hence unsafe() with the
      // quoted identifier built above.
      await (adminSql as any).unsafe(`CREATE DATABASE ${escapedDbName}`);
      console.log(`✓ Database "${targetDatabase}" created successfully`);
    } else {
      console.log(`✓ Database "${targetDatabase}" already exists`);
    }
  } catch (error) {
    const errorMessage = (error as Error).message;
    // Race-tolerant: a concurrent creator triggering "already exists" is fine.
    if (errorMessage.includes("already exists")) {
      console.log(`✓ Database "${targetDatabase}" already exists`);
    } else {
      console.error(`✗ Failed to create database: ${errorMessage}`);
      throw error;
    }
  } finally {
    await adminSql.end();
  }
  // Now connect to target database and create tables
  console.log(`\nConnecting to database "${targetDatabase}"...`);
  const sql = postgres(dbUrl);
  try {
    // Test connection
    await sql`SELECT 1`;
    console.log("✓ Connected to database");
    // Read schema file (path is relative to the process working directory).
    const schemaPath = join(process.cwd(), "src", "storage", "schema.sql");
    console.log(`Reading schema from ${schemaPath}...`);
    const schema = readFileSync(schemaPath, "utf-8");
    // Remove comments: full-comment lines become empty; trailing `--`
    // comments are truncated. (Text-based — see NOTE in the doc header.)
    const cleanedSchema = schema
      .split("\n")
      .map((line) => {
        const trimmed = line.trim();
        if (trimmed.startsWith("--")) {
          return "";
        }
        const commentIndex = line.indexOf("--");
        if (commentIndex >= 0) {
          return line.substring(0, commentIndex);
        }
        return line;
      })
      .join("\n");
    // Split by semicolon and reconstruct multi-line statements
    const parts = cleanedSchema.split(";");
    const allStatements: string[] = [];
    let currentStatement = "";
    for (let i = 0; i < parts.length; i++) {
      const part = parts[i].trim();
      if (part.length === 0) {
        // If we have a current statement and hit an empty part, it's the end of a statement
        if (currentStatement.trim().length > 0) {
          allStatements.push(currentStatement.trim());
          currentStatement = "";
        }
        continue;
      }
      currentStatement += (currentStatement ? " " : "") + part;
      // Check if parentheses are balanced (for CREATE TABLE) or if it's a simple statement (CREATE INDEX)
      const openCount = (currentStatement.match(/\(/g) || []).length;
      const closeCount = (currentStatement.match(/\)/g) || []).length;
      const isIndex = currentStatement
        .toUpperCase()
        .trim()
        .startsWith("CREATE INDEX");
      // Statement is complete if:
      // 1. It's an INDEX (usually single line)
      // 2. Parentheses are balanced
      // 3. Or it's the last part
      if (isIndex || openCount === closeCount || i === parts.length - 1) {
        if (currentStatement.trim().length > 0) {
          allStatements.push(currentStatement.trim());
          currentStatement = "";
        }
      }
    }
    // Separate CREATE TABLE and CREATE INDEX statements so that tables
    // always exist before their indexes are created. Anything else in
    // the schema file is intentionally ignored here.
    const tableStatements: string[] = [];
    const indexStatements: string[] = [];
    for (const statement of allStatements) {
      const upperStatement = statement.toUpperCase().trim();
      if (upperStatement.startsWith("CREATE TABLE")) {
        tableStatements.push(statement);
      } else if (upperStatement.startsWith("CREATE INDEX")) {
        indexStatements.push(statement);
      }
    }
    console.log(
      `Found ${tableStatements.length} table(s) and ${indexStatements.length} index(es) to create`
    );
    // First, create all tables
    console.log("\nCreating tables...");
    for (const statement of tableStatements) {
      if (statement) {
        try {
          await (sql as any).unsafe(statement + ";");
          const tableName =
            statement.match(
              /CREATE TABLE\s+(?:IF NOT EXISTS\s+)?(\w+)/i
            )?.[1] || "unknown";
          console.log(`✓ Created table: ${tableName}`);
        } catch (error) {
          const errorMessage = (error as Error).message;
          // "already exists" / "duplicate" are treated as re-run no-ops.
          if (
            errorMessage.includes("already exists") ||
            errorMessage.includes("duplicate")
          ) {
            const tableName =
              statement.match(
                /CREATE TABLE\s+(?:IF NOT EXISTS\s+)?(\w+)/i
              )?.[1] || "unknown";
            console.log(`⚠ Table already exists: ${tableName}`);
          } else {
            console.error(`✗ Error creating table: ${errorMessage}`);
            console.error(` Statement: ${statement.substring(0, 100)}...`);
            throw error;
          }
        }
      }
    }
    // Then, create all indexes
    console.log("\nCreating indexes...");
    for (const statement of indexStatements) {
      if (statement) {
        try {
          await (sql as any).unsafe(statement + ";");
          const indexName =
            statement.match(
              /CREATE INDEX\s+(?:IF NOT EXISTS\s+)?(\w+)/i
            )?.[1] || "unknown";
          console.log(`✓ Created index: ${indexName}`);
        } catch (error) {
          const errorMessage = (error as Error).message;
          // Same re-run tolerance as table creation above.
          if (
            errorMessage.includes("already exists") ||
            errorMessage.includes("duplicate")
          ) {
            const indexName =
              statement.match(
                /CREATE INDEX\s+(?:IF NOT EXISTS\s+)?(\w+)/i
              )?.[1] || "unknown";
            console.log(`⚠ Index already exists: ${indexName}`);
          } else {
            console.error(`✗ Error creating index: ${errorMessage}`);
            console.error(` Statement: ${statement.substring(0, 100)}...`);
            throw error;
          }
        }
      }
    }
    console.log("\n✓ Database initialization completed successfully!");
    // NOTE(review): this summary list is hard-coded — keep it in sync
    // with schema.sql when tables are added or removed.
    console.log("\nTables created:");
    console.log(" - code_snippets");
    console.log(" - notes");
    console.log(" - tasks");
    console.log(" - baby_milestones");
    console.log(" - math_resources");
    console.log(" - game_wishlist");
  } catch (error) {
    console.error("\n✗ Database initialization failed:", error);
    process.exit(1);
  } finally {
    await sql.end();
  }
}
// Run initialization at module load; any unhandled rejection from
// initDatabase terminates the process with a non-zero exit code.
initDatabase().catch((error) => {
  console.error("Fatal error:", error);
  process.exit(1);
});

View File

@@ -5,16 +5,19 @@
import { mcpServer } from "./server.js";
import { logger } from "./utils/logger.js";
import { database } from "./storage/database.js";
// Register all tools
import { registerCodeSnippetTools } from "./tools/programming/codeSnippet.js";
import { registerProjectTemplateTools } from "./tools/programming/projectTemplate.js";
import { registerDocsTools } from "./tools/programming/docs.js";
import { registerCodeReviewTools } from "./tools/programming/codeReview.js";
import { registerGitTools } from "./tools/programming/git.js";
import { registerNASTools } from "./tools/devops/nas.js";
import { registerServerTools } from "./tools/devops/server.js";
import { registerRouterTools } from "./tools/devops/router.js";
import { registerDeployTools } from "./tools/devops/deploy.js";
import { registerMathTools } from "./tools/family/math.js";
import { registerBabyTools } from "./tools/family/baby.js";
@@ -24,6 +27,7 @@ import { registerGameTools } from "./tools/hobbies/games.js";
import { registerNoteTools } from "./tools/common/notes.js";
import { registerTaskTools } from "./tools/common/tasks.js";
import { registerEmailTools } from "./tools/common/email.js";
// Register all tool modules
logger.info("Registering tools...");
@@ -33,11 +37,13 @@ registerCodeSnippetTools();
registerProjectTemplateTools();
registerDocsTools();
registerCodeReviewTools();
registerGitTools();
// DevOps tools
registerNASTools();
registerServerTools();
registerRouterTools();
registerDeployTools();
// Family tools
registerMathTools();
@@ -50,11 +56,32 @@ registerGameTools();
// Common tools
registerNoteTools();
registerTaskTools();
registerEmailTools();
logger.info("All tools registered. Starting MCP server...");
logger.info("All tools registered. Initializing database...");
// Initialize database connection
database
.initialize()
.then(() => {
logger.info("Database connected successfully. Starting MCP server...");
// Start the server
mcpServer.start().catch((error) => {
logger.error("Failed to start MCP server:", error);
return mcpServer.start();
})
.catch((error) => {
logger.error("Failed to initialize database or start MCP server:", error);
process.exit(1);
});
// Graceful shutdown
process.on("SIGINT", async () => {
logger.info("Shutting down...");
await database.close();
process.exit(0);
});
process.on("SIGTERM", async () => {
logger.info("Shutting down...");
await database.close();
process.exit(0);
});

View File

@@ -19,6 +19,11 @@ class MCPServer {
private server: Server;
private tools: Map<string, { tool: Tool; handler: ToolHandler }> = new Map();
// Expose tools for testing
getTools(): Map<string, { tool: Tool; handler: ToolHandler }> {
return this.tools;
}
constructor() {
this.server = new Server(
{

View File

@@ -23,10 +23,20 @@ export interface RouterConfig {
password?: string;
}
export interface EmailConfig {
host?: string;
port?: number;
user?: string;
password?: string;
from?: string;
secure?: boolean;
}
export interface AppConfig {
nas: NASConfig;
server: ServerConfig;
router: RouterConfig;
email: EmailConfig;
footballApiKey?: string;
gameApiKey?: string;
}
@@ -57,6 +67,14 @@ class ConfigManager {
username: process.env.ROUTER_USERNAME,
password: process.env.ROUTER_PASSWORD,
},
email: {
host: process.env.EMAIL_HOST,
port: process.env.EMAIL_PORT ? parseInt(process.env.EMAIL_PORT) : 587,
user: process.env.EMAIL_USER,
password: process.env.EMAIL_PASSWORD,
from: process.env.EMAIL_FROM,
secure: process.env.EMAIL_SECURE === 'true',
},
footballApiKey: process.env.FOOTBALL_API_KEY,
gameApiKey: process.env.GAME_API_KEY,
};
@@ -78,6 +96,10 @@ class ConfigManager {
return this.config.router;
}
getEmailConfig(): EmailConfig {
return this.config.email;
}
reload(): void {
this.config = this.loadConfig();
}

View File

@@ -1,13 +1,11 @@
/**
* Database/storage layer for the MCP server
* Uses JSON file storage for simplicity
* Uses PostgreSQL for data persistence
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { join } from 'path';
const DATA_DIR = join(process.cwd(), 'data');
import postgres from "postgres";
// Export interfaces (keep them unchanged for compatibility)
export interface CodeSnippet {
id: string;
title: string;
@@ -64,219 +62,434 @@ export interface GameWishlist {
}
class Database {
private ensureDataDir(): void {
if (!existsSync(DATA_DIR)) {
mkdirSync(DATA_DIR, { recursive: true });
}
private sql: postgres.Sql | null = null;
private getConnectionString(): string {
// For tests, use test database URL if provided
const testUrl = process.env.MCP_TEST_DATABASE_URL;
if (testUrl) {
return testUrl;
}
private getFilePath(collection: string): string {
this.ensureDataDir();
return join(DATA_DIR, `${collection}.json`);
const dbUrl = process.env.DATABASE_URL;
if (!dbUrl) {
throw new Error(
"DATABASE_URL environment variable is required. Please set it in your .env file."
);
}
return dbUrl;
}
private readCollection<T>(collection: string): T[] {
const filePath = this.getFilePath(collection);
if (!existsSync(filePath)) {
return [];
private getSql(): postgres.Sql {
if (!this.sql) {
const connectionString = this.getConnectionString();
this.sql = postgres(connectionString, {
max: 10, // Connection pool size
idle_timeout: 20,
connect_timeout: 10,
});
}
return this.sql;
}
async initialize(): Promise<void> {
// Test connection
try {
const content = readFileSync(filePath, 'utf-8');
return JSON.parse(content);
await this.getSql()`SELECT 1`;
} catch (error) {
console.error(`Error reading ${collection}:`, error);
return [];
console.error("Failed to connect to PostgreSQL:", error);
throw new Error(
`Database connection failed: ${error instanceof Error ? error.message : String(error)}`
);
}
}
private writeCollection<T>(collection: string, data: T[]): void {
const filePath = this.getFilePath(collection);
try {
writeFileSync(filePath, JSON.stringify(data, null, 2), 'utf-8');
} catch (error) {
console.error(`Error writing ${collection}:`, error);
throw error;
async close(): Promise<void> {
if (this.sql) {
await this.sql.end();
this.sql = null;
}
}
// Code Snippets
saveCodeSnippet(snippet: CodeSnippet): void {
const snippets = this.readCollection<CodeSnippet>('codeSnippets');
const index = snippets.findIndex((s) => s.id === snippet.id);
if (index >= 0) {
snippets[index] = { ...snippet, updatedAt: new Date().toISOString() };
async saveCodeSnippet(snippet: CodeSnippet): Promise<void> {
const sql = this.getSql();
await sql`
INSERT INTO code_snippets (id, title, code, language, tags, category, created_at, updated_at)
VALUES (${snippet.id}, ${snippet.title}, ${snippet.code}, ${snippet.language}, ${snippet.tags}, ${snippet.category}, ${snippet.createdAt}, ${snippet.updatedAt})
ON CONFLICT (id) DO UPDATE SET
title = EXCLUDED.title,
code = EXCLUDED.code,
language = EXCLUDED.language,
tags = EXCLUDED.tags,
category = EXCLUDED.category,
updated_at = EXCLUDED.updated_at
`;
}
async getCodeSnippets(): Promise<CodeSnippet[]> {
const sql = this.getSql();
const rows = await sql`
SELECT id, title, code, language, tags, category, created_at, updated_at
FROM code_snippets
ORDER BY updated_at DESC
`;
return rows.map((row) => ({
id: row.id,
title: row.title,
code: row.code,
language: row.language,
tags: row.tags || [],
category: row.category || undefined,
createdAt: row.created_at.toISOString(),
updatedAt: row.updated_at.toISOString(),
}));
}
async getCodeSnippet(id: string): Promise<CodeSnippet | undefined> {
const sql = this.getSql();
const rows = await sql`
SELECT id, title, code, language, tags, category, created_at, updated_at
FROM code_snippets
WHERE id = ${id}
LIMIT 1
`;
if (rows.length === 0) {
return undefined;
}
const row = rows[0];
return {
id: row.id,
title: row.title,
code: row.code,
language: row.language,
tags: row.tags || [],
category: row.category || undefined,
createdAt: row.created_at.toISOString(),
updatedAt: row.updated_at.toISOString(),
};
}
async deleteCodeSnippet(id: string): Promise<boolean> {
const sql = this.getSql();
const result = await sql`DELETE FROM code_snippets WHERE id = ${id}`;
return result.count > 0;
}
async searchCodeSnippets(query: string, tags?: string[]): Promise<CodeSnippet[]> {
const sql = this.getSql();
const searchPattern = `%${query}%`;
let rows;
if (tags && tags.length > 0) {
rows = await sql`
SELECT id, title, code, language, tags, category, created_at, updated_at
FROM code_snippets
WHERE (
title ILIKE ${searchPattern} OR
code ILIKE ${searchPattern} OR
language ILIKE ${searchPattern}
) AND tags && ${tags}
ORDER BY updated_at DESC
`;
} else {
snippets.push(snippet);
}
this.writeCollection('codeSnippets', snippets);
rows = await sql`
SELECT id, title, code, language, tags, category, created_at, updated_at
FROM code_snippets
WHERE title ILIKE ${searchPattern} OR code ILIKE ${searchPattern} OR language ILIKE ${searchPattern}
ORDER BY updated_at DESC
`;
}
getCodeSnippets(): CodeSnippet[] {
return this.readCollection<CodeSnippet>('codeSnippets');
}
getCodeSnippet(id: string): CodeSnippet | undefined {
const snippets = this.readCollection<CodeSnippet>('codeSnippets');
return snippets.find((s) => s.id === id);
}
deleteCodeSnippet(id: string): boolean {
const snippets = this.readCollection<CodeSnippet>('codeSnippets');
const filtered = snippets.filter((s) => s.id !== id);
if (filtered.length < snippets.length) {
this.writeCollection('codeSnippets', filtered);
return true;
}
return false;
}
searchCodeSnippets(query: string, tags?: string[]): CodeSnippet[] {
const snippets = this.readCollection<CodeSnippet>('codeSnippets');
const lowerQuery = query.toLowerCase();
return snippets.filter((s) => {
const matchesQuery =
s.title.toLowerCase().includes(lowerQuery) ||
s.code.toLowerCase().includes(lowerQuery) ||
s.language.toLowerCase().includes(lowerQuery);
const matchesTags =
!tags || tags.length === 0 || tags.some((tag) => s.tags.includes(tag));
return matchesQuery && matchesTags;
});
return rows.map((row) => ({
id: row.id,
title: row.title,
code: row.code,
language: row.language,
tags: row.tags || [],
category: row.category || undefined,
createdAt: row.created_at.toISOString(),
updatedAt: row.updated_at.toISOString(),
}));
}
// Notes
saveNote(note: Note): void {
const notes = this.readCollection<Note>('notes');
const index = notes.findIndex((n) => n.id === note.id);
if (index >= 0) {
notes[index] = { ...note, updatedAt: new Date().toISOString() };
} else {
notes.push(note);
}
this.writeCollection('notes', notes);
async saveNote(note: Note): Promise<void> {
const sql = this.getSql();
await sql`
INSERT INTO notes (id, title, content, tags, created_at, updated_at)
VALUES (${note.id}, ${note.title}, ${note.content}, ${note.tags}, ${note.createdAt}, ${note.updatedAt})
ON CONFLICT (id) DO UPDATE SET
title = EXCLUDED.title,
content = EXCLUDED.content,
tags = EXCLUDED.tags,
updated_at = EXCLUDED.updated_at
`;
}
getNotes(): Note[] {
return this.readCollection<Note>('notes');
async getNotes(): Promise<Note[]> {
const sql = this.getSql();
const rows = await sql`
SELECT id, title, content, tags, created_at, updated_at
FROM notes
ORDER BY updated_at DESC
`;
return rows.map((row) => ({
id: row.id,
title: row.title,
content: row.content,
tags: row.tags || [],
createdAt: row.created_at.toISOString(),
updatedAt: row.updated_at.toISOString(),
}));
}
getNote(id: string): Note | undefined {
const notes = this.readCollection<Note>('notes');
return notes.find((n) => n.id === id);
async getNote(id: string): Promise<Note | undefined> {
const sql = this.getSql();
const rows = await sql`
SELECT id, title, content, tags, created_at, updated_at
FROM notes
WHERE id = ${id}
LIMIT 1
`;
if (rows.length === 0) {
return undefined;
}
const row = rows[0];
return {
id: row.id,
title: row.title,
content: row.content,
tags: row.tags || [],
createdAt: row.created_at.toISOString(),
updatedAt: row.updated_at.toISOString(),
};
}
searchNotes(query: string): Note[] {
const notes = this.readCollection<Note>('notes');
const lowerQuery = query.toLowerCase();
return notes.filter(
(n) =>
n.title.toLowerCase().includes(lowerQuery) ||
n.content.toLowerCase().includes(lowerQuery) ||
n.tags.some((tag) => tag.toLowerCase().includes(lowerQuery))
);
/**
 * Case-insensitive substring search over note title, content, and tags.
 * Tag matching unnests the TEXT[] column and applies ILIKE per element.
 * NOTE(review): the query is embedded in an ILIKE pattern without escaping
 * % or _, so those characters act as wildcards — confirm this is intended.
 * @returns matching notes, most recently updated first.
 */
async searchNotes(query: string): Promise<Note[]> {
  const sql = this.getSql();
  const searchPattern = `%${query}%`;
  const rows = await sql`
    SELECT id, title, content, tags, created_at, updated_at
    FROM notes
    WHERE title ILIKE ${searchPattern} OR content ILIKE ${searchPattern} OR EXISTS (
      SELECT 1 FROM unnest(tags) AS tag WHERE tag ILIKE ${searchPattern}
    )
    ORDER BY updated_at DESC
  `;
  return rows.map((row) => ({
    id: row.id,
    title: row.title,
    content: row.content,
    tags: row.tags || [],
    createdAt: row.created_at.toISOString(),
    updatedAt: row.updated_at.toISOString(),
  }));
}
/**
 * Delete a note by id.
 * Fix: the diff merge left the legacy JSON-file implementation's lines
 * interleaved here without a closing brace, making the text invalid;
 * this restores the intended async SQL implementation.
 * @returns true when a row was removed, false when no note matched.
 */
async deleteNote(id: string): Promise<boolean> {
  const sql = this.getSql();
  // postgres.js exposes the affected-row count on the result's `count`.
  const result = await sql`DELETE FROM notes WHERE id = ${id}`;
  return result.count > 0;
}
// Tasks

/**
 * Insert or update a task (upsert keyed on id).
 * Fix: removes leftover lines of the legacy JSON-file implementation that
 * the diff merge left interleaved (unclosed old method body).
 * created_at is only written on first insert; completed_at may be null.
 */
async saveTask(task: Task): Promise<void> {
  const sql = this.getSql();
  await sql`
    INSERT INTO tasks (id, title, description, completed, created_at, completed_at)
    VALUES (${task.id}, ${task.title}, ${task.description}, ${task.completed}, ${task.createdAt}, ${task.completedAt || null})
    ON CONFLICT (id) DO UPDATE SET
      title = EXCLUDED.title,
      description = EXCLUDED.description,
      completed = EXCLUDED.completed,
      completed_at = EXCLUDED.completed_at
  `;
}
/**
 * List tasks, newest first.
 * Fix: strips the legacy sync implementation's `return tasks;` /
 * `return tasks.filter(...)` lines that the diff merge left interleaved
 * inside this method, restoring valid syntax.
 * @param completed - when given, filter to completed (true) or pending
 *   (false); when undefined, return all tasks.
 */
async getTasks(completed?: boolean): Promise<Task[]> {
  const sql = this.getSql();
  let rows;
  if (completed === undefined) {
    rows = await sql`
      SELECT id, title, description, completed, created_at, completed_at
      FROM tasks
      ORDER BY created_at DESC
    `;
  } else {
    rows = await sql`
      SELECT id, title, description, completed, created_at, completed_at
      FROM tasks
      WHERE completed = ${completed}
      ORDER BY created_at DESC
    `;
  }
  // NULL description/completed_at map to undefined per the Task shape.
  return rows.map((row) => ({
    id: row.id,
    title: row.title,
    description: row.description || undefined,
    completed: row.completed,
    createdAt: row.created_at.toISOString(),
    completedAt: row.completed_at ? row.completed_at.toISOString() : undefined,
  }));
}
getTask(id: string): Task | undefined {
const tasks = this.readCollection<Task>('tasks');
return tasks.find((t) => t.id === id);
async getTask(id: string): Promise<Task | undefined> {
const sql = this.getSql();
const rows = await sql`
SELECT id, title, description, completed, created_at, completed_at
FROM tasks
WHERE id = ${id}
LIMIT 1
`;
if (rows.length === 0) {
return undefined;
}
const row = rows[0];
return {
id: row.id,
title: row.title,
description: row.description || undefined,
completed: row.completed,
createdAt: row.created_at.toISOString(),
completedAt: row.completed_at ? row.completed_at.toISOString() : undefined,
};
}
// Baby Milestones
saveBabyMilestone(milestone: BabyMilestone): void {
const milestones = this.readCollection<BabyMilestone>('babyMilestones');
milestones.push(milestone);
this.writeCollection('babyMilestones', milestones);
async saveBabyMilestone(milestone: BabyMilestone): Promise<void> {
const sql = this.getSql();
await sql`
INSERT INTO baby_milestones (id, title, description, date, created_at)
VALUES (${milestone.id}, ${milestone.title}, ${milestone.description}, ${milestone.date}, ${milestone.createdAt})
`;
}
/**
 * List baby milestones, newest date first.
 * Fix: removes the legacy sync header/body lines fused onto this method by
 * the diff merge, restoring valid syntax.
 * Defensively handles drivers that decode DATE/TIMESTAMP columns as either
 * Date objects or strings.
 */
async getBabyMilestones(): Promise<BabyMilestone[]> {
  const sql = this.getSql();
  const rows = await sql`
    SELECT id, title, description, date, created_at
    FROM baby_milestones
    ORDER BY date DESC
  `;
  return rows.map((row) => ({
    id: row.id,
    title: row.title,
    description: row.description,
    // Normalize to YYYY-MM-DD regardless of how the driver decodes DATE.
    date: row.date instanceof Date
      ? row.date.toISOString().split("T")[0]
      : String(row.date).split("T")[0],
    createdAt: row.created_at instanceof Date
      ? row.created_at.toISOString()
      : String(row.created_at),
  }));
}
// Math Resources
saveMathResource(resource: MathResource): void {
const resources = this.readCollection<MathResource>('mathResources');
const index = resources.findIndex((r) => r.id === resource.id);
if (index >= 0) {
resources[index] = resource;
async saveMathResource(resource: MathResource): Promise<void> {
const sql = this.getSql();
await sql`
INSERT INTO math_resources (id, title, content, grade, difficulty, tags, created_at)
VALUES (${resource.id}, ${resource.title}, ${resource.content}, ${resource.grade || null}, ${resource.difficulty || null}, ${resource.tags}, ${resource.createdAt})
ON CONFLICT (id) DO UPDATE SET
title = EXCLUDED.title,
content = EXCLUDED.content,
grade = EXCLUDED.grade,
difficulty = EXCLUDED.difficulty,
tags = EXCLUDED.tags
`;
}
/**
 * List all math resources, newest first.
 * @returns resources with NULL-able grade/difficulty mapped to undefined
 *   and NULL tags normalized to an empty array.
 */
async getMathResources(): Promise<MathResource[]> {
  const sql = this.getSql();
  const rows = await sql`
    SELECT id, title, content, grade, difficulty, tags, created_at
    FROM math_resources
    ORDER BY created_at DESC
  `;
  return rows.map((row) => ({
    id: row.id,
    title: row.title,
    content: row.content,
    grade: row.grade || undefined,
    difficulty: row.difficulty || undefined,
    tags: row.tags || [],
    createdAt: row.created_at.toISOString(),
  }));
}
/**
 * Search math resources by case-insensitive substring over title, content,
 * and tags, optionally restricted to a single grade.
 * Fix: the diff merge left the entire legacy in-memory implementation
 * (resources.push / writeCollection / filter) interleaved inside this
 * method's branches, making the text invalid; this restores the intended
 * async SQL implementation.
 * @param query - substring to match (ILIKE, so % and _ act as wildcards).
 * @param grade - optional exact-match grade filter.
 */
async searchMathResources(query: string, grade?: string): Promise<MathResource[]> {
  const sql = this.getSql();
  const searchPattern = `%${query}%`;
  let rows;
  if (grade) {
    rows = await sql`
      SELECT id, title, content, grade, difficulty, tags, created_at
      FROM math_resources
      WHERE (
        title ILIKE ${searchPattern} OR
        content ILIKE ${searchPattern} OR
        EXISTS (SELECT 1 FROM unnest(tags) AS tag WHERE tag ILIKE ${searchPattern})
      ) AND grade = ${grade}
      ORDER BY created_at DESC
    `;
  } else {
    rows = await sql`
      SELECT id, title, content, grade, difficulty, tags, created_at
      FROM math_resources
      WHERE title ILIKE ${searchPattern} OR content ILIKE ${searchPattern} OR EXISTS (
        SELECT 1 FROM unnest(tags) AS tag WHERE tag ILIKE ${searchPattern}
      )
      ORDER BY created_at DESC
    `;
  }
  return rows.map((row) => ({
    id: row.id,
    title: row.title,
    content: row.content,
    grade: row.grade || undefined,
    difficulty: row.difficulty || undefined,
    tags: row.tags || [],
    createdAt: row.created_at.toISOString(),
  }));
}
// Game Wishlist

/**
 * Insert or update a wishlist entry (upsert keyed on id).
 * Fix: removes the legacy JSON-file implementation's lines the diff merge
 * left interleaved before the async version (unclosed old method body).
 * added_at is preserved on update (not in the SET list).
 */
async saveGameWishlist(game: GameWishlist): Promise<void> {
  const sql = this.getSql();
  await sql`
    INSERT INTO game_wishlist (id, game_name, platform, notes, added_at)
    VALUES (${game.id}, ${game.gameName}, ${game.platform || null}, ${game.notes || null}, ${game.addedAt})
    ON CONFLICT (id) DO UPDATE SET
      game_name = EXCLUDED.game_name,
      platform = EXCLUDED.platform,
      notes = EXCLUDED.notes
  `;
}
/**
 * List all wishlist entries, most recently added first.
 * Fix: removes the legacy sync header/body lines fused onto this method by
 * the diff merge, restoring valid syntax.
 */
async getGameWishlist(): Promise<GameWishlist[]> {
  const sql = this.getSql();
  const rows = await sql`
    SELECT id, game_name, platform, notes, added_at
    FROM game_wishlist
    ORDER BY added_at DESC
  `;
  // snake_case columns map to the camelCase GameWishlist shape; NULL
  // platform/notes become undefined.
  return rows.map((row) => ({
    id: row.id,
    gameName: row.game_name,
    platform: row.platform || undefined,
    notes: row.notes || undefined,
    addedAt: row.added_at.toISOString(),
  }));
}
/**
 * Delete a wishlist entry by id.
 * Fix: removes the legacy JSON-file implementation's lines the diff merge
 * left fused onto this method (unclosed old definition).
 * @returns true when a row was removed, false when no entry matched.
 */
async deleteGameWishlist(id: string): Promise<boolean> {
  const sql = this.getSql();
  // postgres.js exposes the affected-row count on the result's `count`.
  const result = await sql`DELETE FROM game_wishlist WHERE id = ${id}`;
  return result.count > 0;
}
}
export const database = new Database();

81
src/storage/schema.sql Normal file
View File

@@ -0,0 +1,81 @@
-- Database schema for Cloud MCP Server
-- PostgreSQL database schema
--
-- Conventions:
--   * ids are application-generated strings (VARCHAR(255) primary keys)
--   * tag columns are TEXT[] with an empty-array default
--   * timestamps are TIMESTAMP WITH TIME ZONE, defaulting to NOW()
--   * all statements are idempotent (IF NOT EXISTS) so the script can be
--     re-run safely at startup

-- Code Snippets table
CREATE TABLE IF NOT EXISTS code_snippets (
  id VARCHAR(255) PRIMARY KEY,
  title TEXT NOT NULL,
  code TEXT NOT NULL,
  language VARCHAR(100) NOT NULL,
  tags TEXT[] DEFAULT '{}',
  category VARCHAR(100),
  created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Notes table
CREATE TABLE IF NOT EXISTS notes (
  id VARCHAR(255) PRIMARY KEY,
  title TEXT NOT NULL,
  content TEXT NOT NULL,
  tags TEXT[] DEFAULT '{}',
  created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
  updated_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Tasks table
-- completed_at stays NULL until the task is marked complete.
CREATE TABLE IF NOT EXISTS tasks (
  id VARCHAR(255) PRIMARY KEY,
  title TEXT NOT NULL,
  description TEXT,
  completed BOOLEAN NOT NULL DEFAULT FALSE,
  created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
  completed_at TIMESTAMP WITH TIME ZONE
);

-- Baby Milestones table
-- date is a calendar date (no time component).
CREATE TABLE IF NOT EXISTS baby_milestones (
  id VARCHAR(255) PRIMARY KEY,
  title TEXT NOT NULL,
  description TEXT NOT NULL,
  date DATE NOT NULL,
  created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Math Resources table
CREATE TABLE IF NOT EXISTS math_resources (
  id VARCHAR(255) PRIMARY KEY,
  title TEXT NOT NULL,
  content TEXT NOT NULL,
  grade VARCHAR(50),
  difficulty VARCHAR(50),
  tags TEXT[] DEFAULT '{}',
  created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Game Wishlist table
CREATE TABLE IF NOT EXISTS game_wishlist (
  id VARCHAR(255) PRIMARY KEY,
  game_name TEXT NOT NULL,
  platform VARCHAR(100),
  notes TEXT,
  added_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW()
);

-- Create indexes for better search performance
-- GIN indexes on the TEXT[] tag columns accelerate array containment /
-- unnest-based tag searches; the B-tree indexes cover the common filters.
CREATE INDEX IF NOT EXISTS idx_code_snippets_tags ON code_snippets USING GIN(tags);
CREATE INDEX IF NOT EXISTS idx_code_snippets_language ON code_snippets(language);
CREATE INDEX IF NOT EXISTS idx_code_snippets_category ON code_snippets(category);
CREATE INDEX IF NOT EXISTS idx_notes_tags ON notes USING GIN(tags);
CREATE INDEX IF NOT EXISTS idx_tasks_completed ON tasks(completed);
CREATE INDEX IF NOT EXISTS idx_tasks_created_at ON tasks(created_at);
CREATE INDEX IF NOT EXISTS idx_baby_milestones_date ON baby_milestones(date);
CREATE INDEX IF NOT EXISTS idx_math_resources_tags ON math_resources USING GIN(tags);
CREATE INDEX IF NOT EXISTS idx_math_resources_grade ON math_resources(grade);
CREATE INDEX IF NOT EXISTS idx_math_resources_difficulty ON math_resources(difficulty);
CREATE INDEX IF NOT EXISTS idx_game_wishlist_platform ON game_wishlist(platform);

206
src/tools/common/email.ts Normal file
View File

@@ -0,0 +1,206 @@
/**
* Email sending tools
*/
import { mcpServer } from "../../server.js";
import { configManager } from "../../storage/config.js";
import { logger } from "../../utils/logger.js";
import nodemailer from "nodemailer";
import { readFileSync, existsSync } from "fs";
/**
 * Register the `email_send` MCP tool.
 *
 * Reads SMTP settings from configManager.getEmailConfig() (host, port,
 * secure, user, password, from) and sends mail via nodemailer. Supports a
 * plain-text body, optional HTML body, CC/BCC, and attachments supplied
 * either as a file path (validated to exist) or as inline string content.
 */
export function registerEmailTools(): void {
  // Send email
  mcpServer.registerTool(
    {
      name: "email_send",
      description:
        "Send an email via SMTP with support for text, HTML, and attachments",
      inputSchema: {
        type: "object",
        properties: {
          to: {
            type: "string",
            description: "Recipient email address (required)",
          },
          subject: {
            type: "string",
            description: "Email subject (required)",
          },
          text: {
            type: "string",
            description: "Plain text email body (required)",
          },
          html: {
            type: "string",
            description: "HTML email body (optional, if not provided, text will be used)",
          },
          cc: {
            type: "string",
            description: "CC recipient email address (optional)",
          },
          bcc: {
            type: "string",
            description: "BCC recipient email address (optional)",
          },
          attachments: {
            type: "array",
            description: "Email attachments (optional)",
            items: {
              type: "object",
              properties: {
                filename: {
                  type: "string",
                  description: "Attachment filename",
                },
                path: {
                  type: "string",
                  description: "Path to file (if using file path)",
                },
                content: {
                  type: "string",
                  description: "File content as string (if using content directly)",
                },
              },
            },
          },
        },
        required: ["to", "subject", "text"],
      },
    },
    async (args) => {
      // Fail fast when SMTP credentials are missing from the config.
      const emailConfig = configManager.getEmailConfig();
      if (!emailConfig.host || !emailConfig.user || !emailConfig.password) {
        return {
          content: [
            {
              type: "text",
              text: "Error: Email configuration not found. Please set EMAIL_HOST, EMAIL_USER, and EMAIL_PASSWORD in environment variables.",
            },
          ],
          isError: true,
        };
      }

      try {
        const to = args.to as string;
        const subject = args.subject as string;
        const text = args.text as string;
        const html = args.html as string | undefined;
        const cc = args.cc as string | undefined;
        const bcc = args.bcc as string | undefined;
        const attachments = args.attachments as
          | Array<{ filename: string; path?: string; content?: string }>
          | undefined;

        // Create transporter
        // Defaults: port 587 (mail submission), secure=false (plain
        // connection; nodemailer upgrades via STARTTLS when offered).
        const transporter = nodemailer.createTransport({
          host: emailConfig.host,
          port: emailConfig.port || 587,
          secure: emailConfig.secure || false,
          auth: {
            user: emailConfig.user,
            pass: emailConfig.password,
          },
        });

        // Process attachments
        // Each attachment must supply either a path (checked to exist) or
        // inline content; any invalid attachment aborts the whole send.
        const processedAttachments: any[] = [];
        if (attachments && attachments.length > 0) {
          for (const attachment of attachments) {
            if (attachment.path) {
              // Use file path
              if (existsSync(attachment.path)) {
                processedAttachments.push({
                  filename: attachment.filename,
                  path: attachment.path,
                });
              } else {
                logger.warn(`Attachment file not found: ${attachment.path}`);
                return {
                  content: [
                    {
                      type: "text",
                      text: `Error: Attachment file not found: ${attachment.path}`,
                    },
                  ],
                  isError: true,
                };
              }
            } else if (attachment.content) {
              // Use content directly
              processedAttachments.push({
                filename: attachment.filename,
                content: attachment.content,
              });
            } else {
              logger.warn(
                `Invalid attachment: missing path or content for ${attachment.filename}`
              );
              return {
                content: [
                  {
                    type: "text",
                    text: `Error: Invalid attachment: missing path or content for ${attachment.filename}`,
                  },
                ],
                isError: true,
              };
            }
          }
        }

        // Prepare email options
        const mailOptions: any = {
          from: emailConfig.from || emailConfig.user,
          to: to,
          subject: subject,
          text: text,
        };

        if (html) {
          mailOptions.html = html;
        }
        if (cc) {
          mailOptions.cc = cc;
        }
        if (bcc) {
          mailOptions.bcc = bcc;
        }
        if (processedAttachments.length > 0) {
          mailOptions.attachments = processedAttachments;
        }

        // Send email
        logger.info(`Sending email to ${to} with subject: ${subject}`);
        const info = await transporter.sendMail(mailOptions);

        return {
          content: [
            {
              type: "text",
              text: `Email sent successfully!\n\nTo: ${to}\nSubject: ${subject}\nMessage ID: ${info.messageId}${processedAttachments.length > 0 ? `\nAttachments: ${processedAttachments.length}` : ""}`,
            },
          ],
        };
      } catch (error) {
        logger.error("Error sending email:", error);
        const errorMessage =
          error instanceof Error ? error.message : String(error);
        return {
          content: [
            {
              type: "text",
              text: `Error sending email: ${errorMessage}\n\nPlease check:\n1. Email configuration (EMAIL_HOST, EMAIL_USER, EMAIL_PASSWORD)\n2. SMTP server connection\n3. Recipient email address`,
            },
          ],
          isError: true,
        };
      }
    }
  );
}

View File

@@ -39,18 +39,19 @@ export function registerNoteTools(): void {
},
async (args) => {
const now = new Date().toISOString();
const existingNote = args.id
? await database.getNote(args.id as string)
: undefined;
const note: Note = {
id: (args.id as string) || randomUUID(),
title: args.title as string,
content: args.content as string,
tags: (args.tags as string[]) || [],
createdAt: args.id
? database.getNote(args.id)?.createdAt || now
: now,
createdAt: existingNote?.createdAt || now,
updatedAt: now,
};
database.saveNote(note);
await database.saveNote(note);
return {
content: [
@@ -81,7 +82,7 @@ export function registerNoteTools(): void {
},
async (args) => {
const query = args.query as string;
const notes = database.searchNotes(query);
const notes = await database.searchNotes(query);
if (notes.length === 0) {
return {
@@ -128,7 +129,7 @@ export function registerNoteTools(): void {
},
},
async (args) => {
const notes = database.getNotes();
const notes = await database.getNotes();
const limit = args.limit as number | undefined;
// Sort by updated date (newest first)
@@ -185,7 +186,7 @@ export function registerNoteTools(): void {
},
async (args) => {
const id = args.id as string;
const deleted = database.deleteNote(id);
const deleted = await database.deleteNote(id);
if (deleted) {
return {

View File

@@ -37,7 +37,7 @@ export function registerTaskTools(): void {
createdAt: new Date().toISOString(),
};
database.saveTask(task);
await database.saveTask(task);
return {
content: [
@@ -67,7 +67,7 @@ export function registerTaskTools(): void {
},
async (args) => {
const completed = args.completed as boolean | undefined;
const tasks = database.getTasks(completed);
const tasks = await database.getTasks(completed);
if (tasks.length === 0) {
const statusText = completed === true ? 'completed' : completed === false ? 'pending' : '';
@@ -93,9 +93,12 @@ export function registerTaskTools(): void {
})
.join('\n\n---\n\n');
const total = database.getTasks().length;
const completedCount = database.getTasks(true).length;
const pendingCount = database.getTasks(false).length;
const allTasks = await database.getTasks();
const completedTasks = await database.getTasks(true);
const pendingTasks = await database.getTasks(false);
const total = allTasks.length;
const completedCount = completedTasks.length;
const pendingCount = pendingTasks.length;
return {
content: [
@@ -126,7 +129,7 @@ export function registerTaskTools(): void {
},
async (args) => {
const id = args.id as string;
const task = database.getTask(id);
const task = await database.getTask(id);
if (!task) {
return {
@@ -152,7 +155,7 @@ export function registerTaskTools(): void {
task.completed = true;
task.completedAt = new Date().toISOString();
database.saveTask(task);
await database.saveTask(task);
return {
content: [

384
src/tools/devops/deploy.ts Normal file
View File

@@ -0,0 +1,384 @@
/**
* Deployment tools - for direct server deployment
* These tools run directly on the server, no SSH needed
*/
import { mcpServer } from "../../server.js";
import { logger } from "../../utils/logger.js";
import { execSync } from "child_process";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
/**
 * Register deployment tools that run directly on the host:
 *  - deploy_update: git fetch/pull, dependency install when lockfiles
 *    changed, then systemd/PM2 restart.
 *  - deploy_status: git + service + process status report.
 *  - deploy_logs:  tail of the local deploy.log file.
 *
 * Fixes vs. original: the `branch` argument is validated before being
 * interpolated into a shell command (injection risk), and unused locals
 * (`fetchOutput`, step-3 `currentBranch`, `pm2Status`) are removed.
 */
export function registerDeployTools(): void {
  // Pull latest code and deploy
  mcpServer.registerTool(
    {
      name: "deploy_update",
      description:
        "Pull latest code from git and redeploy the MCP server (runs directly on server)",
      inputSchema: {
        type: "object",
        properties: {
          branch: {
            type: "string",
            description: "Git branch to pull from (default: main or master)",
            default: "main",
          },
          rebuild: {
            type: "boolean",
            description: "Force rebuild Docker image (default: false)",
            default: false,
          },
        },
      },
    },
    async (args) => {
      try {
        const branch = (args.branch as string) || "main";
        // NOTE(review): rebuild is accepted by the schema but not yet acted
        // upon anywhere below — confirm whether Docker rebuild is planned.
        const rebuild = (args.rebuild as boolean) || false;
        void rebuild;
        const projectDir = process.cwd();

        // branch is interpolated into a shell command below; restrict it to
        // plain git-ref characters to prevent command injection.
        if (!/^[A-Za-z0-9._/-]+$/.test(branch)) {
          return {
            content: [
              {
                type: "text",
                text: `Error: Invalid branch name: ${branch}`,
              },
            ],
            isError: true,
          };
        }

        logger.info(`Starting deployment update in ${projectDir}`);

        let output = "Deployment Update\n\n";
        const steps: string[] = [];

        // Step 1: Check if we're in a git repository
        if (!existsSync(join(projectDir, ".git"))) {
          return {
            content: [
              {
                type: "text",
                text: "Error: Not in a git repository. Please run this from the project directory.",
              },
            ],
            isError: true,
          };
        }

        // Step 2: Fetch latest changes
        try {
          steps.push("Fetching latest changes from git...");
          execSync("git fetch origin", {
            cwd: projectDir,
            encoding: "utf-8",
          });
          output += `✓ Fetched changes\n`;
        } catch (error) {
          return {
            content: [
              {
                type: "text",
                text: `Error fetching from git: ${
                  error instanceof Error ? error.message : String(error)
                }`,
              },
            ],
            isError: true,
          };
        }

        // Step 3: Pull latest code
        try {
          steps.push(`Pulling latest code from ${branch}...`);
          const pullOutput = execSync(`git pull origin ${branch}`, {
            cwd: projectDir,
            encoding: "utf-8",
          });
          output += `✓ Pulled latest code from ${branch}\n`;
          output += `\n${pullOutput}\n`;
        } catch (error) {
          return {
            content: [
              {
                type: "text",
                text: `Error pulling code: ${
                  error instanceof Error ? error.message : String(error)
                }`,
              },
            ],
            isError: true,
          };
        }

        // Step 4: Install dependencies if package.json / lockfile changed.
        // HEAD@{1} is the pre-pull position from the reflog.
        try {
          steps.push("Checking for dependency updates...");
          const hasPackageChanges = execSync(
            "git diff HEAD@{1} HEAD -- package.json bun.lockb",
            { cwd: projectDir, encoding: "utf-8" }
          ).trim();

          if (hasPackageChanges) {
            steps.push("Installing/updating dependencies...");
            execSync("bun install", {
              cwd: projectDir,
              encoding: "utf-8",
              stdio: "inherit",
            });
            output += `✓ Dependencies updated\n`;
          } else {
            output += `✓ No dependency changes\n`;
          }
        } catch (error) {
          // Not critical, continue
          logger.warn("Could not check dependencies:", error);
        }

        // Step 5: Restart service — try systemd first, then PM2, otherwise
        // ask the operator to restart manually.
        try {
          steps.push("Restarting MCP server...");
          const serviceName = process.env.MCP_SERVICE_NAME || "cloud-mcp";

          try {
            execSync(`systemctl is-active --quiet ${serviceName}`, {
              encoding: "utf-8",
            });
            // Service exists and is active, restart it
            execSync(`sudo systemctl restart ${serviceName}`, {
              encoding: "utf-8",
            });
            output += `✓ Restarted systemd service: ${serviceName}\n`;
          } catch {
            // Not a systemd service, try PM2
            try {
              execSync("pm2 list | grep cloud-mcp", {
                encoding: "utf-8",
              });
              execSync("pm2 restart cloud-mcp", {
                encoding: "utf-8",
              });
              output += `✓ Restarted PM2 process: cloud-mcp\n`;
            } catch {
              // Not PM2 either, just log that manual restart is needed
              output += `⚠ Service restart skipped (not running as systemd/PM2)\n`;
              output += `  Please restart the MCP server manually\n`;
            }
          }
        } catch (error) {
          logger.warn("Could not restart service:", error);
          output += `⚠ Could not auto-restart service\n`;
        }

        output += `\n✅ Deployment update completed!\n`;
        output += `\nSteps executed:\n${steps
          .map((s, i) => `${i + 1}. ${s}`)
          .join("\n")}`;

        return {
          content: [
            {
              type: "text",
              text: output,
            },
          ],
        };
      } catch (error) {
        logger.error("Deployment error:", error);
        return {
          content: [
            {
              type: "text",
              text: `Error during deployment: ${
                error instanceof Error ? error.message : String(error)
              }`,
            },
          ],
          isError: true,
        };
      }
    }
  );

  // Check deployment status
  mcpServer.registerTool(
    {
      name: "deploy_status",
      description:
        "Check deployment status - git status, last commit, service status",
      inputSchema: {
        type: "object",
        properties: {},
      },
    },
    async () => {
      try {
        const projectDir = process.cwd();
        let output = "Deployment Status\n\n";

        // Git status: branch, last commit, behind-count, dirty files.
        try {
          const gitStatus = execSync("git status --short", {
            cwd: projectDir,
            encoding: "utf-8",
          }).trim();

          const lastCommit = execSync("git log -1 --oneline", {
            cwd: projectDir,
            encoding: "utf-8",
          }).trim();

          const currentBranch = execSync("git rev-parse --abbrev-ref HEAD", {
            cwd: projectDir,
            encoding: "utf-8",
          }).trim();

          output += `📦 Git Status:\n`;
          output += `Branch: ${currentBranch}\n`;
          output += `Last commit: ${lastCommit}\n`;

          // Check if behind remote
          try {
            execSync("git fetch origin", { cwd: projectDir });
            const behind = execSync(
              `git rev-list HEAD..origin/${currentBranch} --count`,
              { cwd: projectDir, encoding: "utf-8" }
            ).trim();
            if (behind !== "0") {
              output += `⚠ Behind remote by ${behind} commit(s)\n`;
            } else {
              output += `✓ Up to date with remote\n`;
            }
          } catch {
            // Ignore
          }

          if (gitStatus) {
            output += `\nUncommitted changes:\n${gitStatus}\n`;
          } else {
            output += `✓ Working directory clean\n`;
          }
        } catch (error) {
          output += `Error checking git status: ${
            error instanceof Error ? error.message : String(error)
          }\n`;
        }

        // Service status: systemd first, fall back to PM2.
        output += `\n🔧 Service Status:\n`;
        const serviceName = process.env.MCP_SERVICE_NAME || "cloud-mcp";

        try {
          const systemdStatus = execSync(`systemctl is-active ${serviceName}`, {
            encoding: "utf-8",
          }).trim();
          output += `Systemd: ${systemdStatus}\n`;
        } catch {
          try {
            // Throws when the process is absent; output itself is unused.
            execSync("pm2 list | grep cloud-mcp", {
              encoding: "utf-8",
            });
            output += `PM2: Running\n`;
          } catch {
            output += `Service: Not managed by systemd/PM2\n`;
          }
        }

        // Process status: look for the bun entry point in the process list.
        try {
          const processInfo = execSync(
            "ps aux | grep 'bun.*index.ts' | grep -v grep",
            {
              encoding: "utf-8",
            }
          ).trim();
          if (processInfo) {
            output += `\nProcess: Running\n`;
          } else {
            output += `\nProcess: Not found\n`;
          }
        } catch {
          output += `\nProcess: Unknown\n`;
        }

        return {
          content: [
            {
              type: "text",
              text: output,
            },
          ],
        };
      } catch (error) {
        return {
          content: [
            {
              type: "text",
              text: `Error checking status: ${
                error instanceof Error ? error.message : String(error)
              }`,
            },
          ],
          isError: true,
        };
      }
    }
  );

  // View deployment logs
  mcpServer.registerTool(
    {
      name: "deploy_logs",
      description: "View deployment logs",
      inputSchema: {
        type: "object",
        properties: {
          lines: {
            type: "number",
            description: "Number of lines to show (default: 50)",
            default: 50,
          },
        },
      },
    },
    async (args) => {
      try {
        const lines = (args.lines as number) || 50;
        const logFile = join(process.cwd(), "deploy.log");

        if (existsSync(logFile)) {
          const content = readFileSync(logFile, "utf-8");
          const logLines = content.split("\n").slice(-lines).join("\n");
          return {
            content: [
              {
                type: "text",
                text: `Deployment Logs (last ${lines} lines):\n\n${logLines}`,
              },
            ],
          };
        } else {
          return {
            content: [
              {
                type: "text",
                text: "No deployment log file found. Logs will be created on first deployment.",
              },
            ],
          };
        }
      } catch (error) {
        return {
          content: [
            {
              type: "text",
              text: `Error reading logs: ${
                error instanceof Error ? error.message : String(error)
              }`,
            },
          ],
          isError: true,
        };
      }
    }
  );
}

View File

@@ -42,7 +42,7 @@ export function registerBabyTools(): void {
createdAt: now,
};
database.saveBabyMilestone(milestone);
await database.saveBabyMilestone(milestone);
return {
content: [
@@ -71,7 +71,7 @@ export function registerBabyTools(): void {
},
},
async (args) => {
const milestones = database.getBabyMilestones();
const milestones = await database.getBabyMilestones();
const limit = args.limit as number | undefined;
// Sort by date (newest first)
@@ -157,7 +157,7 @@ export function registerBabyTools(): void {
createdAt: new Date().toISOString(),
};
db.saveTask(task);
await db.saveTask(task);
// Common baby reminders reference
const commonReminders: Record<string, string> = {

View File

@@ -32,7 +32,7 @@ export function registerMathTools(): void {
const query = args.query as string;
const grade = args.grade as string | undefined;
const resources = database.searchMathResources(query, grade);
const resources = await database.searchMathResources(query, grade);
if (resources.length === 0) {
return {
@@ -218,7 +218,7 @@ export function registerMathTools(): void {
createdAt: now,
};
database.saveMathResource(resource);
await database.saveMathResource(resource);
return {
content: [

View File

@@ -224,7 +224,7 @@ export function registerGameTools(): void {
addedAt: new Date().toISOString(),
};
database.saveGameWishlist(game);
await database.saveGameWishlist(game);
return {
content: [
@@ -235,7 +235,7 @@ export function registerGameTools(): void {
],
};
} else if (action === 'list') {
const games = database.getGameWishlist();
const games = await database.getGameWishlist();
if (games.length === 0) {
return {
@@ -277,7 +277,7 @@ export function registerGameTools(): void {
};
}
const deleted = database.deleteGameWishlist(id);
const deleted = await database.deleteGameWishlist(id);
if (deleted) {
return {

View File

@@ -48,6 +48,9 @@ export function registerCodeSnippetTools(): void {
},
async (args) => {
const now = new Date().toISOString();
const existingSnippet = args.id
? await database.getCodeSnippet(args.id as string)
: undefined;
const snippet: CodeSnippet = {
id: (args.id as string) || randomUUID(),
title: args.title as string,
@@ -55,13 +58,11 @@ export function registerCodeSnippetTools(): void {
language: args.language as string,
tags: args.tags as string[],
category: args.category as string,
createdAt: args.id
? database.getCodeSnippet(args.id as string)?.createdAt || now
: now,
createdAt: existingSnippet?.createdAt || now,
updatedAt: now,
};
database.saveCodeSnippet(snippet);
await database.saveCodeSnippet(snippet);
return {
content: [
@@ -98,7 +99,7 @@ export function registerCodeSnippetTools(): void {
async (args) => {
const query = args.query as string;
const tags = args.tags as string[] | undefined;
const snippets = database.searchCodeSnippets(query, tags);
const snippets = await database.searchCodeSnippets(query, tags);
if (snippets.length === 0) {
return {
@@ -149,7 +150,7 @@ export function registerCodeSnippetTools(): void {
},
},
async (args) => {
const snippets = database.getCodeSnippets();
const snippets = await database.getCodeSnippets();
const limit = args.limit as number | undefined;
const limited = limit ? snippets.slice(0, limit) : snippets;
@@ -202,7 +203,7 @@ export function registerCodeSnippetTools(): void {
},
async (args) => {
const id = args.id as string;
const deleted = database.deleteCodeSnippet(id);
const deleted = await database.deleteCodeSnippet(id);
if (deleted) {
return {

View File

@@ -0,0 +1,672 @@
/**
* Git version control tools
*/
import { mcpServer } from "../../server.js";
import { logger } from "../../utils/logger.js";
import { execSync } from "child_process";
import { existsSync } from "fs";
import { join } from "path";
export function registerGitTools(): void {
// Git status
mcpServer.registerTool(
{
name: "git_status",
description: "Get git repository status (working directory, staged files, branch)",
inputSchema: {
type: "object",
properties: {
path: {
type: "string",
description: "Path to git repository (default: current directory)",
default: ".",
},
},
},
},
async (args) => {
try {
const repoPath = (args.path as string) || process.cwd();
const gitDir = join(repoPath, ".git");
if (!existsSync(gitDir)) {
return {
content: [
{
type: "text",
text: `Error: Not a git repository: ${repoPath}`,
},
],
isError: true,
};
}
const status = execSync("git status --short", {
cwd: repoPath,
encoding: "utf-8",
}).trim();
const branch = execSync("git rev-parse --abbrev-ref HEAD", {
cwd: repoPath,
encoding: "utf-8",
}).trim();
const lastCommit = execSync("git log -1 --oneline", {
cwd: repoPath,
encoding: "utf-8",
}).trim();
let output = `Git Status (${repoPath})\n\n`;
output += `Branch: ${branch}\n`;
output += `Last commit: ${lastCommit}\n\n`;
if (status) {
output += `Working directory changes:\n${status}`;
} else {
output += "Working directory clean";
}
return {
content: [
{
type: "text",
text: output,
},
],
};
} catch (error) {
return {
content: [
{
type: "text",
text: `Error: ${error instanceof Error ? error.message : String(error)}`,
},
],
isError: true,
};
}
}
);
// Git add
mcpServer.registerTool(
  {
    name: "git_add",
    description: "Stage files for commit",
    inputSchema: {
      type: "object",
      properties: {
        files: {
          type: "array",
          items: { type: "string" },
          description: "Files to stage (use '.' for all files)",
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
      required: ["files"],
    },
  },
  // Stages the given files; an empty list stages everything (".").
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const files = args.files as string[];
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      const filesToAdd = files.length === 0 ? ["."] : files;
      // POSIX single-quote escaping (' -> '\''): paths with spaces or shell
      // metacharacters can no longer break out of the command (injection fix).
      const quoted = filesToAdd.map((f) => `'${f.replace(/'/g, `'\\''`)}'`);
      // "--" stops git from interpreting a file name as an option flag.
      execSync(`git add -- ${quoted.join(" ")}`, {
        cwd: repoPath,
        encoding: "utf-8",
      });
      const staged = execSync("git diff --cached --name-only", {
        cwd: repoPath,
        encoding: "utf-8",
      }).trim();
      return {
        content: [
          {
            type: "text",
            text: `Files staged successfully!\n\nStaged files:\n${staged || "No files staged"}`,
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git commit
mcpServer.registerTool(
  {
    name: "git_commit",
    description: "Commit staged changes",
    inputSchema: {
      type: "object",
      properties: {
        message: {
          type: "string",
          description: "Commit message",
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
      required: ["message"],
    },
  },
  // Commits whatever is currently staged with the provided message.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const message = args.message as string;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      // Feed the message via stdin (-F -) instead of interpolating it into
      // the shell string: quotes, backticks, or $( ) in the message can no
      // longer inject commands, and multi-line messages work correctly.
      execSync("git commit -F -", {
        cwd: repoPath,
        encoding: "utf-8",
        input: message,
      });
      const commitHash = execSync("git rev-parse HEAD", {
        cwd: repoPath,
        encoding: "utf-8",
      }).trim();
      const commitInfo = execSync("git log -1 --oneline", {
        cwd: repoPath,
        encoding: "utf-8",
      }).trim();
      return {
        content: [
          {
            type: "text",
            text: `Commit created successfully!\n\n${commitInfo}\nHash: ${commitHash}`,
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}\n\nNote: Make sure you have staged files before committing.`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git push
mcpServer.registerTool(
  {
    name: "git_push",
    description: "Push commits to remote repository",
    inputSchema: {
      type: "object",
      properties: {
        remote: {
          type: "string",
          description: "Remote name (default: origin)",
          default: "origin",
        },
        branch: {
          type: "string",
          description: "Branch to push (default: current branch)",
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
    },
  },
  // Pushes the given (or current) branch to the given remote.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const remote = (args.remote as string) || "origin";
      const branch = args.branch as string | undefined;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      const currentBranch =
        branch ||
        execSync("git rev-parse --abbrev-ref HEAD", {
          cwd: repoPath,
          encoding: "utf-8",
        }).trim();
      // Both values are interpolated into a shell command below: restrict
      // them to conservative ref characters and forbid a leading "-"/"."
      // so neither shell injection nor option injection (e.g. "-f") works.
      const safeRef = /^[A-Za-z0-9][A-Za-z0-9._\/-]*$/;
      if (!safeRef.test(remote) || !safeRef.test(currentBranch)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Invalid remote or branch name: ${remote}/${currentBranch}`,
            },
          ],
          isError: true,
        };
      }
      execSync(`git push ${remote} ${currentBranch}`, {
        cwd: repoPath,
        encoding: "utf-8",
      });
      return {
        content: [
          {
            type: "text",
            text: `Pushed to ${remote}/${currentBranch} successfully!`,
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}\n\nNote: Make sure you have commits to push and remote is configured.`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git pull
mcpServer.registerTool(
  {
    name: "git_pull",
    description: "Pull latest changes from remote repository",
    inputSchema: {
      type: "object",
      properties: {
        remote: {
          type: "string",
          description: "Remote name (default: origin)",
          default: "origin",
        },
        branch: {
          type: "string",
          description: "Branch to pull (default: current branch)",
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
    },
  },
  // Pulls the given (or current) branch from the given remote.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const remote = (args.remote as string) || "origin";
      const branch = args.branch as string | undefined;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      const currentBranch =
        branch ||
        execSync("git rev-parse --abbrev-ref HEAD", {
          cwd: repoPath,
          encoding: "utf-8",
        }).trim();
      // Both values are interpolated into a shell command below: restrict
      // them to conservative ref characters and forbid a leading "-"/"."
      // so neither shell injection nor option injection works.
      const safeRef = /^[A-Za-z0-9][A-Za-z0-9._\/-]*$/;
      if (!safeRef.test(remote) || !safeRef.test(currentBranch)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Invalid remote or branch name: ${remote}/${currentBranch}`,
            },
          ],
          isError: true,
        };
      }
      const output = execSync(`git pull ${remote} ${currentBranch}`, {
        cwd: repoPath,
        encoding: "utf-8",
      });
      return {
        content: [
          {
            type: "text",
            text: `Pulled from ${remote}/${currentBranch} successfully!\n\n${output}`,
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git log
mcpServer.registerTool(
  {
    name: "git_log",
    description: "Show commit history",
    inputSchema: {
      type: "object",
      properties: {
        limit: {
          type: "number",
          description: "Number of commits to show (default: 10)",
          default: 10,
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
    },
  },
  // Shows the most recent `limit` commits as one-line summaries.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      // Coerce to a positive integer; anything else (NaN, negative, 0,
      // non-numeric input) falls back to 10 so a bad value cannot end up
      // inside the shell command (e.g. "git log -NaN" or "git log --5").
      const rawLimit = Number(args.limit);
      const limit =
        Number.isFinite(rawLimit) && rawLimit > 0 ? Math.floor(rawLimit) : 10;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      const log = execSync(`git log -${limit} --oneline --decorate`, {
        cwd: repoPath,
        encoding: "utf-8",
      }).trim();
      return {
        content: [
          {
            type: "text",
            text: `Recent commits (${limit}):\n\n${log || "No commits found"}`,
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git branch
mcpServer.registerTool(
  {
    name: "git_branch",
    description: "List, create, or delete branches",
    inputSchema: {
      type: "object",
      properties: {
        action: {
          type: "string",
          description: "Action: 'list', 'create', or 'delete'",
          enum: ["list", "create", "delete"],
          default: "list",
        },
        name: {
          type: "string",
          description: "Branch name (required for create/delete)",
        },
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
      },
    },
  },
  // Dispatches on `action`: list all branches, or create/delete by name.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const action = (args.action as string) || "list";
      const name = args.name as string | undefined;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      // `name` is interpolated into shell commands below: allow only
      // conservative ref characters and forbid a leading "-"/"." so neither
      // shell injection nor option injection (e.g. "-D") is possible.
      const safeBranch = /^[A-Za-z0-9][A-Za-z0-9._\/-]*$/;
      if (action === "list") {
        const branches = execSync("git branch -a", {
          cwd: repoPath,
          encoding: "utf-8",
        }).trim();
        const currentBranch = execSync("git rev-parse --abbrev-ref HEAD", {
          cwd: repoPath,
          encoding: "utf-8",
        }).trim();
        return {
          content: [
            {
              type: "text",
              text: `Branches:\n\n${branches}\n\nCurrent branch: ${currentBranch}`,
            },
          ],
        };
      } else if (action === "create") {
        if (!name) {
          return {
            content: [
              {
                type: "text",
                text: "Error: Branch name is required for create action",
              },
            ],
            isError: true,
          };
        }
        if (!safeBranch.test(name)) {
          return {
            content: [
              {
                type: "text",
                text: `Error: Invalid branch name: ${name}`,
              },
            ],
            isError: true,
          };
        }
        execSync(`git branch ${name}`, {
          cwd: repoPath,
          encoding: "utf-8",
        });
        return {
          content: [
            {
              type: "text",
              text: `Branch "${name}" created successfully!`,
            },
          ],
        };
      } else if (action === "delete") {
        if (!name) {
          return {
            content: [
              {
                type: "text",
                text: "Error: Branch name is required for delete action",
              },
            ],
            isError: true,
          };
        }
        if (!safeBranch.test(name)) {
          return {
            content: [
              {
                type: "text",
                text: `Error: Invalid branch name: ${name}`,
              },
            ],
            isError: true,
          };
        }
        // "-d" only deletes fully-merged branches; git itself rejects others.
        execSync(`git branch -d ${name}`, {
          cwd: repoPath,
          encoding: "utf-8",
        });
        return {
          content: [
            {
              type: "text",
              text: `Branch "${name}" deleted successfully!`,
            },
          ],
        };
      }
      return {
        content: [
          {
            type: "text",
            text: "Invalid action",
          },
        ],
        isError: true,
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }
);
// Git diff
mcpServer.registerTool(
  {
    name: "git_diff",
    description: "Show changes between commits, branches, or working directory",
    inputSchema: {
      type: "object",
      properties: {
        path: {
          type: "string",
          description: "Path to git repository (default: current directory)",
          default: ".",
        },
        file: {
          type: "string",
          description: "Specific file to show diff (optional)",
        },
      },
    },
  },
  // Shows the unstaged working-tree diff, optionally limited to one file.
  async (args) => {
    try {
      const repoPath = (args.path as string) || process.cwd();
      const file = args.file as string | undefined;
      const gitDir = join(repoPath, ".git");
      if (!existsSync(gitDir)) {
        return {
          content: [
            {
              type: "text",
              text: `Error: Not a git repository: ${repoPath}`,
            },
          ],
          isError: true,
        };
      }
      // Quote the file name (POSIX single-quote escaping) and terminate
      // option parsing with "--" so a crafted name such as "--output=x"
      // or one containing shell metacharacters is harmless.
      const diffCommand = file
        ? `git diff -- '${file.replace(/'/g, `'\\''`)}'`
        : "git diff";
      const diff = execSync(diffCommand, {
        cwd: repoPath,
        encoding: "utf-8",
      }).trim();
      return {
        content: [
          {
            type: "text",
            text: diff || "No changes found",
          },
        ],
      };
    } catch (error) {
      return {
        content: [
          {
            type: "text",
            text: `Error: ${error instanceof Error ? error.message : String(error)}`,
          },
        ],
        isError: true,
      };
    }
  }
);
}

View File

@@ -1,3 +1,9 @@
/*
* @Date: 2026-01-06 15:03:24
* @LastEditors: 陈子健
* @LastEditTime: 2026-01-07 10:04:47
* @FilePath: /cloud-mcp/src/tools/programming/projectTemplate.ts
*/
/**
* Project template generation tools
*/
@@ -323,6 +329,7 @@ bun run build
try {
mkdirSync(projectPath, { recursive: true });
mkdirSync(join(projectPath, "frontend"), { recursive: true });
mkdirSync(join(projectPath, "frontend", "src"), { recursive: true });
mkdirSync(join(projectPath, "backend"), { recursive: true });
mkdirSync(join(projectPath, "backend", "src"), { recursive: true });
@@ -433,7 +440,7 @@ serve({
module: "ESNext",
lib: ["ES2022"],
moduleResolution: "bundler",
types: ["bun-types"],
types: ["node"],
strict: true,
esModuleInterop: true,
skipLibCheck: true,

173
tests/README.md Normal file
View File

@@ -0,0 +1,173 @@
# 测试文档
## 测试框架
项目使用 **Bun 内置测试框架** (`bun test`),支持 TypeScript,无需额外配置。
## 运行测试
```bash
# 运行所有测试
bun test
# 运行特定测试文件
bun test tests/unit/storage/database.test.ts
# 监听模式(自动重新运行)
bun test --watch
# 生成覆盖率报告
bun test --coverage
```
## 测试结构
```
tests/
├── helpers/ # 测试辅助函数
│ ├── test-utils.ts # 通用测试工具(临时目录、环境变量等)
│ ├── database-helper.ts # 数据库测试辅助
│ └── tool-helper.ts # 工具测试辅助
├── fixtures/ # 测试数据
│ └── test-data.ts # 测试数据定义
├── unit/ # 单元测试
│ ├── storage/ # 存储层测试
│ │ ├── database.test.ts
│ │ └── config.test.ts
│ └── tools/ # 工具测试
│ ├── programming/ # 编程工具
│ ├── family/ # 家庭工具
│ ├── hobbies/ # 爱好工具
│ ├── common/ # 通用工具
│ └── devops/ # DevOps 工具
└── integration/ # 集成测试
└── mcp-server.test.ts
```
## 测试覆盖
### ✅ 存储层测试
- 代码片段 CRUD 操作
- 笔记 CRUD 操作
- 任务 CRUD 操作
- 数学资源 CRUD 操作
- 育儿里程碑 CRUD 操作
- 游戏愿望单 CRUD 操作
- 搜索功能
### ✅ 配置管理测试
- 环境变量加载
- 配置获取方法
### ✅ 编程工具测试
- 代码片段管理(保存、搜索、列出、删除)
- 项目模板生成Vite+Vue3、全栈项目
- 技术文档查询TypeScript、Vue3、Bun
- 代码审查和优化
### ✅ 家庭工具测试
- 数学资源搜索和保存
- 数学题目生成(不同年级和难度)
- 育儿里程碑记录
- 育儿提醒设置
### ✅ 爱好工具测试
- 游戏信息查询
- 游戏折扣查询
- 游戏愿望单管理
- 足球信息查询mock
### ✅ 通用工具测试
- 笔记创建、搜索、列出、删除
- 任务添加、列出、完成
### ✅ 服务器工具测试
- 服务器状态查询mock SSH
- 服务器日志查看mock SSH
- 部署功能验证
### ✅ 集成测试
- 工具注册验证
- 工具调用测试
- 错误处理测试
## 测试隔离
每个测试使用独立的临时目录和数据存储,确保测试之间不会相互影响:
```typescript
beforeEach(() => {
testContext = createTempDir();
cleanupDb = setupTestDatabase(testContext);
});
afterEach(() => {
cleanupDb();
testContext.cleanup();
});
```
## 测试统计
- **总测试数**: 77
- **通过**: 77 ✅
- **失败**: 0
- **测试文件**: 14
## 注意事项
1. **NAS 和软路由测试**: 由于需要隧道穿透,这些功能的测试被排除在外
2. **SSH 连接测试**: 使用 mock不会实际连接服务器
3. **API 测试**: 游戏和足球 API 测试可能会因为网络问题失败,但会优雅处理
4. **测试数据**: 所有测试数据存储在临时目录,测试结束后自动清理
## 调试测试
如果测试失败,可以使用以下方法调试:
```bash
# 运行单个测试并显示详细输出
bun test tests/unit/storage/database.test.ts --reporter verbose
# 使用 Node.js 调试器
bun test --inspect
```
## 添加新测试
1. 在相应的测试目录创建测试文件
2. 使用 `describe` 和 `test` 组织测试
3. 使用 `beforeEach` 和 `afterEach` 设置和清理
4. 使用测试辅助函数(`callTool`, `setupTestDatabase` 等)
示例:
```typescript
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { callTool } from "../../helpers/tool-helper.js";
import { createTempDir } from "../../helpers/test-utils.js";
import { setupTestDatabase } from "../../helpers/database-helper.js";
describe("My Tool", () => {
let testContext: ReturnType<typeof createTempDir>;
let cleanupDb: () => void;
beforeEach(() => {
testContext = createTempDir();
cleanupDb = setupTestDatabase(testContext);
registerMyTool();
});
afterEach(() => {
cleanupDb();
testContext.cleanup();
});
test("should do something", async () => {
const result = await callTool("my_tool", { arg: "value" });
expect(result.content[0].text).toContain("expected");
});
});
```

66
tests/fixtures/test-data.ts vendored Normal file
View File

@@ -0,0 +1,66 @@
/**
* Test data fixtures
*/
import {
CodeSnippet,
Note,
Task,
BabyMilestone,
MathResource,
GameWishlist,
} from "../../src/storage/database.js";
// Fixture ids are stable strings so tests can look records up by id after
// a save; timestamps are fixed ISO-8601 strings for deterministic assertions.
/** Code-snippet fixture used by snippet CRUD/search tests. */
export const testCodeSnippet: CodeSnippet = {
  id: "test-snippet-1",
  title: "Test Snippet",
  code: "const x = 1;",
  language: "typescript",
  tags: ["test", "example"],
  category: "utils",
  createdAt: "2024-01-01T00:00:00.000Z",
  updatedAt: "2024-01-01T00:00:00.000Z",
};
/** Note fixture used by note CRUD/search tests. */
export const testNote: Note = {
  id: "test-note-1",
  title: "Test Note",
  content: "This is a test note",
  tags: ["test"],
  createdAt: "2024-01-01T00:00:00.000Z",
  updatedAt: "2024-01-01T00:00:00.000Z",
};
/** Task fixture; starts not-completed so completion tests can flip it. */
export const testTask: Task = {
  id: "test-task-1",
  title: "Test Task",
  description: "This is a test task",
  completed: false,
  createdAt: "2024-01-01T00:00:00.000Z",
};
/** Baby-milestone fixture used by milestone tests. */
export const testBabyMilestone: BabyMilestone = {
  id: "test-milestone-1",
  title: "First Steps",
  description: "Baby took first steps",
  date: "2024-01-01",
  createdAt: "2024-01-01T00:00:00.000Z",
};
/** Math-resource fixture; grade/difficulty drive the filter tests. */
export const testMathResource: MathResource = {
  id: "test-math-1",
  title: "Addition Problems",
  content: "1 + 1 = 2",
  grade: "1st",
  difficulty: "easy",
  tags: ["addition"],
  createdAt: "2024-01-01T00:00:00.000Z",
};
/** Game-wishlist fixture used by wishlist add/delete tests. */
export const testGameWishlist: GameWishlist = {
  id: "test-game-1",
  gameName: "Test Game",
  platform: "PC",
  notes: "Want to play this",
  addedAt: "2024-01-01T00:00:00.000Z",
};

View File

@@ -0,0 +1,90 @@
/**
* Database test helper - creates isolated database instances for testing
*/
import { database } from "../../src/storage/database.js";
import { readFileSync } from "fs";
import { join } from "path";
import type { TestContext } from "./test-utils.js";
/**
 * Setup test database with isolated database connection.
 * Uses MCP_TEST_DATABASE_URL if provided, otherwise uses DATABASE_URL.
 *
 * Applies src/storage/schema.sql idempotently ("already exists" errors are
 * ignored), truncates all tables so each test starts empty, and returns an
 * async cleanup function that truncates again, closes the connection, and
 * restores the original DATABASE_URL.
 */
export async function setupTestDatabase(testContext: TestContext): Promise<() => Promise<void>> {
  // Use test database URL if provided, otherwise use main database URL
  const testDbUrl = process.env.MCP_TEST_DATABASE_URL || process.env.DATABASE_URL;
  if (!testDbUrl) {
    throw new Error(
      "MCP_TEST_DATABASE_URL or DATABASE_URL environment variable is required for tests"
    );
  }
  // Point the app at the test database, remembering the previous value.
  const originalDbUrl = process.env.DATABASE_URL;
  process.env.DATABASE_URL = testDbUrl;
  // Initialize database connection
  await database.initialize();
  // Create tables if they don't exist (using schema.sql)
  try {
    const schemaPath = join(process.cwd(), "src", "storage", "schema.sql");
    const schema = readFileSync(schemaPath, "utf-8");
    // Strip "--" line comments BEFORE splitting on ";". The previous
    // approach filtered out any fragment that merely *started* with a
    // comment line, silently dropping the real DDL that followed it.
    const withoutComments = schema
      .split("\n")
      .filter((line) => !line.trim().startsWith("--"))
      .join("\n");
    // NOTE(review): splitting on ";" does not handle dollar-quoted bodies
    // (CREATE FUNCTION ... $$...$$); adequate for a plain-DDL schema file.
    const statements = withoutComments
      .split(";")
      .map((s) => s.trim())
      .filter((s) => s.length > 0);
    // We'll use the database connection directly.
    // Note: This is a simplified approach. In production, you might want to use a migration tool.
    const sql = (database as any).getSql();
    for (const statement of statements) {
      try {
        // Use postgres.unsafe() to execute raw SQL
        await (sql as any).unsafe(statement);
      } catch (error) {
        // Ignore errors for IF NOT EXISTS statements
        const errorMsg = (error as Error).message;
        if (!errorMsg.includes("already exists") && !errorMsg.includes("duplicate")) {
          console.warn(`Schema statement warning: ${errorMsg}`);
        }
      }
    }
  } catch (error) {
    console.warn("Could not execute schema.sql:", error);
    // Continue anyway - tables might already exist
  }
  // Clean up all tables before each test
  await cleanupTestData();
  // Return cleanup function
  return async () => {
    await cleanupTestData();
    await database.close();
    if (originalDbUrl) {
      process.env.DATABASE_URL = originalDbUrl;
    } else {
      delete process.env.DATABASE_URL;
    }
  };
}
/**
 * Clean up test data from all tables.
 *
 * Truncates every application table in one statement; RESTART IDENTITY
 * resets auto-increment sequences and CASCADE clears dependent rows.
 * Failures (e.g. tables not created yet) are logged and ignored so setup
 * can proceed on a fresh database.
 */
async function cleanupTestData(): Promise<void> {
  try {
    // getSql() is not on the public database interface — reach in via `any`.
    const sql = (database as any).getSql();
    await sql`TRUNCATE TABLE code_snippets, notes, tasks, baby_milestones, math_resources, game_wishlist RESTART IDENTITY CASCADE`;
  } catch (error) {
    // Tables might not exist yet, ignore
    console.warn("Could not truncate test tables:", error);
  }
}

View File

@@ -0,0 +1,68 @@
/**
* Test utilities and helpers
*/
import { mkdtempSync, rmSync, existsSync } from "fs";
import { join } from "path";
import { tmpdir } from "os";
/** A disposable temp directory plus the function that removes it. */
export interface TestContext {
  tempDir: string;
  cleanup: () => void;
}
/**
 * Create a fresh temporary directory under the OS temp root.
 * The returned cleanup() removes it recursively and is safe to call twice.
 */
export function createTempDir(prefix = "mcp-test-"): TestContext {
  const dir = mkdtempSync(join(tmpdir(), prefix));
  const cleanup = (): void => {
    if (!existsSync(dir)) {
      return;
    }
    rmSync(dir, { recursive: true, force: true });
  };
  return { tempDir: dir, cleanup };
}
/**
 * Override process.env with the given key/value pairs.
 * Returns a restore function that puts every key back to its prior state
 * (deleting keys that did not exist before).
 */
export function setTestEnv(env: Record<string, string>): () => void {
  const saved = new Map<string, string | undefined>();
  Object.entries(env).forEach(([key, value]) => {
    saved.set(key, process.env[key]);
    process.env[key] = value;
  });
  return () => {
    saved.forEach((previous, key) => {
      if (previous === undefined) {
        delete process.env[key];
      } else {
        process.env[key] = previous;
      }
    });
  };
}
/**
 * Pause for the given number of milliseconds (for async operations).
 */
export function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Mock HTTP response helper: wraps data in a minimal {status, data, headers}
 * shape. Status defaults to 200; headers are always empty.
 */
export function createMockResponse(data: unknown, status = 200) {
  const headers = {};
  return { status, data, headers };
}

View File

@@ -0,0 +1,35 @@
/**
* Tool testing helper - helps test MCP tools
*/
import { mcpServer } from "../../src/server.js";
import type { ToolHandler } from "../../src/server.js";
/**
 * Call a registered MCP tool by name with the given arguments.
 * Throws when no tool with that name has been registered.
 */
export async function callTool(
  toolName: string,
  args: Record<string, unknown>
): Promise<{
  content: Array<{ type: string; text: string }>;
  isError?: boolean;
}> {
  // Look the handler up in the server's tool registry.
  const entry = mcpServer.getTools().get(toolName);
  if (entry === undefined) {
    throw new Error(`Tool ${toolName} not found`);
  }
  return entry.handler(args);
}
/**
 * Get the names of all tools currently registered on the server.
 */
export function getRegisteredTools(): string[] {
  return [...mcpServer.getTools().keys()];
}

View File

@@ -0,0 +1,114 @@
/**
* MCP Server integration tests
*/
import { describe, test, expect, beforeEach } from "bun:test";
import { mcpServer } from "../../src/server.js";
import { getRegisteredTools, callTool } from "../helpers/tool-helper.js";
// Register all tools
import { registerCodeSnippetTools } from "../../src/tools/programming/codeSnippet.js";
import { registerProjectTemplateTools } from "../../src/tools/programming/projectTemplate.js";
import { registerDocsTools } from "../../src/tools/programming/docs.js";
import { registerCodeReviewTools } from "../../src/tools/programming/codeReview.js";
import { registerServerTools } from "../../src/tools/devops/server.js";
import { registerRouterTools } from "../../src/tools/devops/router.js";
import { registerMathTools } from "../../src/tools/family/math.js";
import { registerBabyTools } from "../../src/tools/family/baby.js";
import { registerFootballTools } from "../../src/tools/hobbies/football.js";
import { registerGameTools } from "../../src/tools/hobbies/games.js";
import { registerNoteTools } from "../../src/tools/common/notes.js";
import { registerTaskTools } from "../../src/tools/common/tasks.js";
describe("MCP Server Integration", () => {
  beforeEach(() => {
    // Register all tools
    // NOTE(review): registration runs before every test; this assumes
    // registerTool overwrites (or tolerates) an already-registered name.
    registerCodeSnippetTools();
    registerProjectTemplateTools();
    registerDocsTools();
    registerCodeReviewTools();
    registerServerTools();
    registerRouterTools();
    registerMathTools();
    registerBabyTools();
    registerFootballTools();
    registerGameTools();
    registerNoteTools();
    registerTaskTools();
  });
  // Smoke-check: one representative tool name per category must be present.
  test("should register all tools", () => {
    const tools = getRegisteredTools();
    // Check that key tools are registered
    expect(tools).toContain("code_snippet_save");
    expect(tools).toContain("code_snippet_search");
    expect(tools).toContain("project_template_create");
    expect(tools).toContain("docs_typescript");
    expect(tools).toContain("code_review");
    expect(tools).toContain("note_create");
    expect(tools).toContain("task_add");
    expect(tools).toContain("math_problem_generate");
    expect(tools).toContain("baby_milestone_add");
    expect(tools).toContain("game_info");
    expect(tools).toContain("server_status");
    // Should have many tools registered
    expect(tools.length).toBeGreaterThan(20);
  });
  // docs_typescript takes no required args, so {} is a valid call.
  test("should handle tool call with valid arguments", async () => {
    const result = await callTool("docs_typescript", {});
    expect(result.content).toBeDefined();
    expect(result.content.length).toBeGreaterThan(0);
    expect(result.content[0].type).toBe("text");
    expect(result.content[0].text).toBeDefined();
  });
  // callTool throws (rather than returning isError) for unknown names.
  test("should handle tool call with invalid tool name", async () => {
    await expect(callTool("non_existent_tool", {})).rejects.toThrow(
      "Tool non_existent_tool not found"
    );
  });
  test("should handle tool errors gracefully", async () => {
    // Call a tool that might fail (like server_status without proper config)
    const result = await callTool("server_status", {});
    // Should return an error response, not throw
    expect(result.content).toBeDefined();
    expect(result.content[0].text).toBeDefined();
  });
  // Category coverage is inferred purely from tool-name prefixes.
  test("should support multiple tool categories", () => {
    const tools = getRegisteredTools();
    // Programming tools
    const programmingTools = tools.filter(
      (t) =>
        t.startsWith("code_") ||
        t.startsWith("project_") ||
        t.startsWith("docs_")
    );
    expect(programmingTools.length).toBeGreaterThan(0);
    // Family tools
    const familyTools = tools.filter(
      (t) => t.startsWith("math_") || t.startsWith("baby_")
    );
    expect(familyTools.length).toBeGreaterThan(0);
    // Common tools
    const commonTools = tools.filter(
      (t) => t.startsWith("note_") || t.startsWith("task_")
    );
    expect(commonTools.length).toBeGreaterThan(0);
    // Hobby tools
    const hobbyTools = tools.filter(
      (t) => t.startsWith("game_") || t.startsWith("football_")
    );
    expect(hobbyTools.length).toBeGreaterThan(0);
  });
});

14
tests/tsconfig.json Normal file
View File

@@ -0,0 +1,14 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"types": ["node"],
"noEmit": true,
"skipLibCheck": true,
"typeRoots": ["../types", "../node_modules/@types"],
"moduleResolution": "bundler",
"rootDir": ".."
},
"include": ["**/*.ts"],
"exclude": []
}

View File

@@ -0,0 +1,82 @@
/**
* Configuration management tests
*/
// @ts-ignore - Bun test types are built-in
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { configManager } from "../../../src/storage/config.js";
import { setTestEnv } from "../../helpers/test-utils.js";
describe("ConfigManager", () => {
  // Restores the pre-test environment after each test.
  let cleanupEnv: () => void;
  beforeEach(() => {
    // Seed every config-relevant env var, then force a reload so the
    // singleton picks the test values up.
    cleanupEnv = setTestEnv({
      NAS_HOST: "test-nas-host",
      NAS_USERNAME: "test-user",
      NAS_PASSWORD: "test-password",
      NAS_PROTOCOL: "smb",
      SERVER_HOST: "test-server",
      SERVER_USERNAME: "test-server-user",
      SERVER_PORT: "2222",
      SERVER_KEY_PATH: "/test/key/path",
      ROUTER_HOST: "test-router",
      ROUTER_USERNAME: "test-router-user",
      ROUTER_PASSWORD: "test-router-password",
      FOOTBALL_API_KEY: "test-football-key",
    });
    configManager.reload();
  });
  afterEach(() => {
    cleanupEnv();
  });
  test("should load NAS configuration from environment", () => {
    const nasConfig = configManager.getNASConfig();
    expect(nasConfig.host).toBe("test-nas-host");
    expect(nasConfig.username).toBe("test-user");
    expect(nasConfig.password).toBe("test-password");
    expect(nasConfig.protocol).toBe("smb");
  });
  // SERVER_PORT is set as the string "2222"; the config layer is expected
  // to parse it to the number 2222.
  test("should load server configuration from environment", () => {
    const serverConfig = configManager.getServerConfig();
    expect(serverConfig.host).toBe("test-server");
    expect(serverConfig.username).toBe("test-server-user");
    expect(serverConfig.port).toBe(2222);
    expect(serverConfig.keyPath).toBe("/test/key/path");
  });
  test("should load router configuration from environment", () => {
    const routerConfig = configManager.getRouterConfig();
    expect(routerConfig.host).toBe("test-router");
    expect(routerConfig.username).toBe("test-router-user");
    expect(routerConfig.password).toBe("test-router-password");
  });
  test("should get full configuration", () => {
    const config = configManager.getConfig();
    expect(config.nas.host).toBe("test-nas-host");
    expect(config.server.host).toBe("test-server");
    expect(config.router.host).toBe("test-router");
    expect(config.footballApiKey).toBe("test-football-key");
  });
  test("should handle missing environment variables", () => {
    // Restore first (idempotent), then explicitly clear the NAS vars in
    // case they also exist in the real environment.
    cleanupEnv();
    // Clear all relevant env vars
    delete process.env.NAS_HOST;
    delete process.env.NAS_USERNAME;
    delete process.env.NAS_PASSWORD;
    delete process.env.NAS_PROTOCOL;
    configManager.reload();
    const nasConfig = configManager.getNASConfig();
    expect(nasConfig.host).toBeUndefined();
  });
});

View File

@@ -0,0 +1,212 @@
/**
* Database storage layer tests
*/
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { database } from "../../../src/storage/database.js";
import { createTempDir } from "../../helpers/test-utils.js";
import { setupTestDatabase } from "../../helpers/database-helper.js";
import {
testCodeSnippet,
testNote,
testTask,
testBabyMilestone,
testMathResource,
testGameWishlist,
} from "../../fixtures/test-data.js";
describe("Database", () => {
let testContext: ReturnType<typeof createTempDir>;
let cleanupDb: () => void;
beforeEach(() => {
testContext = createTempDir();
cleanupDb = setupTestDatabase(testContext);
});
afterEach(() => {
cleanupDb();
testContext.cleanup();
});
describe("Code Snippets", () => {
test("should save and retrieve code snippet", () => {
database.saveCodeSnippet(testCodeSnippet);
const snippet = database.getCodeSnippet(testCodeSnippet.id);
expect(snippet).toBeDefined();
expect(snippet?.title).toBe(testCodeSnippet.title);
expect(snippet?.code).toBe(testCodeSnippet.code);
expect(snippet?.language).toBe(testCodeSnippet.language);
});
test("should list all code snippets", () => {
database.saveCodeSnippet(testCodeSnippet);
const snippets = database.getCodeSnippets();
expect(snippets.length).toBeGreaterThan(0);
expect(snippets.find((s) => s.id === testCodeSnippet.id)).toBeDefined();
});
test("should search code snippets", () => {
database.saveCodeSnippet(testCodeSnippet);
const results = database.searchCodeSnippets("Test");
expect(results.length).toBeGreaterThan(0);
expect(results[0].title).toContain("Test");
});
test("should search code snippets by tags", () => {
database.saveCodeSnippet(testCodeSnippet);
const results = database.searchCodeSnippets("", ["test"]);
expect(results.length).toBeGreaterThan(0);
expect(results[0].tags).toContain("test");
});
test("should delete code snippet", () => {
database.saveCodeSnippet(testCodeSnippet);
const deleted = database.deleteCodeSnippet(testCodeSnippet.id);
expect(deleted).toBe(true);
expect(database.getCodeSnippet(testCodeSnippet.id)).toBeUndefined();
});
test("should update existing code snippet", () => {
database.saveCodeSnippet(testCodeSnippet);
const updated = {
...testCodeSnippet,
title: "Updated Title",
};
database.saveCodeSnippet(updated);
const snippet = database.getCodeSnippet(testCodeSnippet.id);
expect(snippet?.title).toBe("Updated Title");
});
});
describe("Notes", () => {
test("should save and retrieve note", () => {
database.saveNote(testNote);
const note = database.getNote(testNote.id);
expect(note).toBeDefined();
expect(note?.title).toBe(testNote.title);
expect(note?.content).toBe(testNote.content);
});
test("should list all notes", () => {
database.saveNote(testNote);
const notes = database.getNotes();
expect(notes.length).toBeGreaterThan(0);
expect(notes.find((n) => n.id === testNote.id)).toBeDefined();
});
test("should search notes", () => {
database.saveNote(testNote);
const results = database.searchNotes("Test");
expect(results.length).toBeGreaterThan(0);
expect(results[0].title).toContain("Test");
});
test("should delete note", () => {
database.saveNote(testNote);
const deleted = database.deleteNote(testNote.id);
expect(deleted).toBe(true);
expect(database.getNote(testNote.id)).toBeUndefined();
});
});
describe("Tasks", () => {
test("should save and retrieve task", () => {
database.saveTask(testTask);
const task = database.getTask(testTask.id);
expect(task).toBeDefined();
expect(task?.title).toBe(testTask.title);
expect(task?.completed).toBe(false);
});
test("should list all tasks", () => {
database.saveTask(testTask);
const tasks = database.getTasks();
expect(tasks.length).toBeGreaterThan(0);
expect(tasks.find((t) => t.id === testTask.id)).toBeDefined();
});
test("should filter tasks by completion status", () => {
database.saveTask(testTask);
const completedTask = { ...testTask, id: "task-2", completed: true };
database.saveTask(completedTask);
const pendingTasks = database.getTasks(false);
const completedTasks = database.getTasks(true);
expect(pendingTasks.length).toBeGreaterThan(0);
expect(completedTasks.length).toBeGreaterThan(0);
expect(pendingTasks.find((t) => t.id === testTask.id)).toBeDefined();
expect(completedTasks.find((t) => t.id === "task-2")).toBeDefined();
});
});
describe("Baby Milestones", () => {
test("should save and retrieve baby milestone", () => {
database.saveBabyMilestone(testBabyMilestone);
const milestones = database.getBabyMilestones();
expect(milestones.length).toBeGreaterThan(0);
expect(milestones.find((m) => m.id === testBabyMilestone.id)).toBeDefined();
});
});
describe("Math Resources", () => {
test("should save and retrieve math resource", () => {
database.saveMathResource(testMathResource);
const resources = database.getMathResources();
expect(resources.length).toBeGreaterThan(0);
expect(resources.find((r) => r.id === testMathResource.id)).toBeDefined();
});
test("should search math resources", () => {
database.saveMathResource(testMathResource);
const results = database.searchMathResources("Addition");
expect(results.length).toBeGreaterThan(0);
expect(results[0].title).toContain("Addition");
});
test("should filter math resources by grade", () => {
database.saveMathResource(testMathResource);
const results = database.searchMathResources("", "1st");
expect(results.length).toBeGreaterThan(0);
expect(results[0].grade).toBe("1st");
});
});
describe("Game Wishlist", () => {
test("should save and retrieve game wishlist", () => {
database.saveGameWishlist(testGameWishlist);
const games = database.getGameWishlist();
expect(games.length).toBeGreaterThan(0);
expect(games.find((g) => g.id === testGameWishlist.id)).toBeDefined();
});
test("should delete game from wishlist", () => {
database.saveGameWishlist(testGameWishlist);
const deleted = database.deleteGameWishlist(testGameWishlist.id);
expect(deleted).toBe(true);
const games = database.getGameWishlist();
expect(games.find((g) => g.id === testGameWishlist.id)).toBeUndefined();
});
});
});

View File

@@ -0,0 +1,97 @@
/*
* @Date: 2026-01-07 09:11:08
* @LastEditors: 陈子健
* @LastEditTime: 2026-01-07 10:04:45
* @FilePath: /cloud-mcp/tests/unit/tools/common/notes.test.ts
*/
/**
* Notes tools tests
*/
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerNoteTools } from "../../../../src/tools/common/notes.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Notes Tools", () => {
  let testContext: ReturnType<typeof createTempDir>;
  // setupTestDatabase (async) resolves to an async cleanup function.
  let cleanupDb: () => Promise<void>;
  beforeEach(async () => {
    testContext = createTempDir();
    // BUG FIX: setupTestDatabase returns a Promise. Without `await`,
    // cleanupDb held the Promise (not a function) and tests started
    // before the database was ready.
    cleanupDb = await setupTestDatabase(testContext);
    registerNoteTools();
  });
  afterEach(async () => {
    await cleanupDb();
    testContext.cleanup();
  });
  test("should create note", async () => {
    const result = await callTool("note_create", {
      title: "Test Note",
      content: "This is a test note",
      tags: ["test"],
    });
    expect(result.content[0].text).toContain("saved successfully");
    expect(result.content[0].text).toContain("Test Note");
  });
  test("should search notes", async () => {
    // Create a note first
    await callTool("note_create", {
      title: "Test Note",
      content: "This is a test note",
      tags: ["test"],
    });
    const result = await callTool("note_search", {
      query: "Test",
    });
    expect(result.content[0].text).toContain("Found");
    expect(result.content[0].text).toContain("Test Note");
  });
  test("should list notes", async () => {
    // Create a note
    await callTool("note_create", {
      title: "Test Note",
      content: "This is a test note",
    });
    const result = await callTool("note_list", {});
    expect(result.content[0].text).toContain("Total");
    expect(result.content[0].text).toContain("Test Note");
  });
  test("should delete note", async () => {
    // Create a note
    const createResult = await callTool("note_create", {
      title: "Test Note",
      content: "This is a test note",
    });
    // Extract the generated UUID from the tool's success message.
    const idMatch = createResult.content[0].text.match(/ID: ([a-f0-9-]+)/);
    if (!idMatch) {
      throw new Error("Could not extract ID");
    }
    const id = idMatch[1];
    // Delete it
    const result = await callTool("note_delete", { id });
    expect(result.content[0].text).toContain("deleted successfully");
  });
  test("should handle empty notes list", async () => {
    const result = await callTool("note_list", {});
    expect(result.content[0].text).toMatch(/No notes found|Use note_create/i);
  });
});

View File

@@ -0,0 +1,92 @@
/*
 * @Date: 2026-01-07 09:11:15
 * @LastEditors: 陈子健
 * @LastEditTime: 2026-01-07 10:04:50
 * @FilePath: /cloud-mcp/tests/unit/tools/common/tasks.test.ts
 */
/**
 * Unit tests for the task tools: add, list, filter and complete.
 * Each case gets its own temp directory and isolated database.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerTaskTools } from "../../../../src/tools/common/tasks.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Tasks Tools", () => {
  let workspace: ReturnType<typeof createTempDir>;
  let teardownDb: () => void;
  beforeEach(() => {
    workspace = createTempDir();
    teardownDb = setupTestDatabase(workspace);
    registerTaskTools();
  });
  afterEach(() => {
    teardownDb();
    workspace.cleanup();
  });
  test("should add task", async () => {
    const reply = await callTool("task_add", {
      title: "Test Task",
      description: "This is a test task",
    });
    const text = reply.content[0].text;
    expect(text).toContain("added successfully");
    expect(text).toContain("Test Task");
  });
  test("should list tasks", async () => {
    // Seed one task so the listing is non-empty.
    await callTool("task_add", {
      title: "Test Task",
    });
    const reply = await callTool("task_list", {});
    const text = reply.content[0].text;
    expect(text).toContain("Tasks");
    expect(text).toContain("Test Task");
  });
  test("should filter tasks by completion status", async () => {
    await callTool("task_add", {
      title: "Test Task",
    });
    const everything = await callTool("task_list", {});
    const openOnly = await callTool("task_list", { completed: false });
    expect(everything.content[0].text).toContain("Tasks");
    expect(openOnly.content[0].text).toContain("Pending");
  });
  test("should complete task", async () => {
    // Add a task, then recover its generated ID from the reply text.
    const added = await callTool("task_add", {
      title: "Test Task",
    });
    const found = added.content[0].text.match(/ID: ([a-f0-9-]+)/);
    if (!found) {
      throw new Error("Could not extract ID");
    }
    const done = await callTool("task_complete", { id: found[1] });
    expect(done.content[0].text).toContain("marked as completed");
  });
  test("should handle empty tasks list", async () => {
    const reply = await callTool("task_list", {});
    // The message varies based on completion status filter
    expect(reply.content[0].text).toMatch(/No.*tasks|Use task_add/i);
  });
});

View File

@@ -0,0 +1,71 @@
/*
 * @Date: 2026-01-07 09:11:20
 * @LastEditors: 陈子健
 * @LastEditTime: 2026-01-07 10:04:41
 * @FilePath: /cloud-mcp/tests/unit/tools/devops/server.test.ts
 */
/**
 * Server tools tests (with mocked SSH)
 *
 * No real SSH connection exists in CI, so these tests only assert that
 * each tool responds — either with data or with a graceful error message.
 */
// FIX: `afterEach` is used below but was not imported, which throws a
// ReferenceError at runtime under bun:test.
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerServerTools } from "../../../../src/tools/devops/server.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { setTestEnv } from "../../../helpers/test-utils.js";
describe("Server Tools", () => {
  let cleanupEnv: () => void;
  beforeEach(() => {
    // Provide a complete (but fake) SSH configuration for every test.
    cleanupEnv = setTestEnv({
      SERVER_HOST: "test-server",
      SERVER_USERNAME: "test-user",
      SERVER_PORT: "22",
      SERVER_KEY_PATH: "/test/key/path",
    });
    registerServerTools();
  });
  afterEach(() => {
    cleanupEnv();
  });
  test("should handle server status request", async () => {
    const result = await callTool("server_status", {});
    // Should either return status or handle connection error gracefully
    expect(result.content[0].text).toBeDefined();
    // Since we don't have actual SSH connection, it will likely return an error
    // which is expected behavior
  }, 15000); // Longer timeout for SSH attempts
  test("should handle server logs request", async () => {
    const result = await callTool("server_logs", {
      logPath: "/var/log/test.log",
      lines: 10,
    });
    // Should either return logs or handle connection error gracefully
    expect(result.content[0].text).toBeDefined();
  }, 15000);
  test("should handle deploy request", async () => {
    const result = await callTool("server_deploy", {
      localPath: "/local/path",
      remotePath: "/remote/path",
      command: "pm2 restart app",
    });
    expect(result.content[0].text).toContain("Deployment initiated");
  });
  test("should handle missing server configuration", async () => {
    // Drop the fake config entirely; the tool must report it is missing.
    cleanupEnv();
    cleanupEnv = setTestEnv({});
    registerServerTools();
    const result = await callTool("server_status", {});
    expect(result.content[0].text).toContain("configuration not found");
  });
});

View File

@@ -0,0 +1,70 @@
/**
 * Unit tests for the baby tools: milestones and reminders.
 * Each case runs against an isolated temp directory and database.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerBabyTools } from "../../../../src/tools/family/baby.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Baby Tools", () => {
  let workspace: ReturnType<typeof createTempDir>;
  let teardownDb: () => void;
  beforeEach(() => {
    workspace = createTempDir();
    teardownDb = setupTestDatabase(workspace);
    registerBabyTools();
  });
  afterEach(() => {
    teardownDb();
    workspace.cleanup();
  });
  test("should add baby milestone", async () => {
    const reply = await callTool("baby_milestone_add", {
      title: "First Steps",
      description: "Baby took first steps today",
      date: "2024-01-01",
    });
    const text = reply.content[0].text;
    expect(text).toContain("recorded successfully");
    expect(text).toContain("First Steps");
  });
  test("should list baby milestones", async () => {
    // Seed one milestone so the listing is non-empty.
    await callTool("baby_milestone_add", {
      title: "First Steps",
      description: "Baby took first steps",
      date: "2024-01-01",
    });
    const reply = await callTool("baby_milestone_list", {});
    const text = reply.content[0].text;
    expect(text).toContain("Total");
    expect(text).toContain("First Steps");
  });
  test("should set baby reminder", async () => {
    const reply = await callTool("baby_reminder_set", {
      title: "Vaccine",
      description: "DTaP vaccine due",
      date: "2024-02-01",
      type: "vaccine",
    });
    const text = reply.content[0].text;
    expect(text).toContain("reminder set successfully");
    expect(text).toContain("Vaccine");
  });
  test("should handle empty milestones list", async () => {
    const reply = await callTool("baby_milestone_list", {});
    expect(reply.content[0].text).toMatch(
      /No milestones recorded|Use baby_milestone_add/i
    );
  });
});

View File

@@ -0,0 +1,109 @@
/*
 * @Date: 2026-01-07 09:10:17
 * @LastEditors: 陈子健
 * @LastEditTime: 2026-01-07 10:04:38
 * @FilePath: /cloud-mcp/tests/unit/tools/family/math.test.ts
 */
/**
 * Unit tests for the math tools: problem generation plus resource
 * save/search. Each case uses an isolated temp directory and database.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerMathTools } from "../../../../src/tools/family/math.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Math Tools", () => {
  let workspace: ReturnType<typeof createTempDir>;
  let teardownDb: () => void;
  beforeEach(() => {
    workspace = createTempDir();
    teardownDb = setupTestDatabase(workspace);
    registerMathTools();
  });
  afterEach(() => {
    teardownDb();
    workspace.cleanup();
  });
  test("should generate math problems for elementary grade", async () => {
    const reply = await callTool("math_problem_generate", {
      grade: "1st",
      difficulty: "easy",
      count: 5,
    });
    const text = reply.content[0].text;
    expect(text).toContain("Generated");
    expect(text).toContain("1st");
    expect(text).toContain("problem");
  });
  test("should generate math problems for middle school", async () => {
    const reply = await callTool("math_problem_generate", {
      grade: "middle",
      difficulty: "medium",
      topic: "algebra",
      count: 3,
    });
    const text = reply.content[0].text;
    expect(text).toContain("Generated");
    expect(text).toContain("middle");
    expect(text).toContain("algebra");
  });
  test("should save math resource", async () => {
    const reply = await callTool("math_resource_save", {
      title: "Addition Worksheet",
      content: "1 + 1 = 2",
      grade: "1st",
      tags: ["addition"],
    });
    const text = reply.content[0].text;
    expect(text).toContain("saved successfully");
    expect(text).toContain("Addition Worksheet");
  });
  test("should search math resources", async () => {
    // Seed one resource so the search has something to hit.
    await callTool("math_resource_save", {
      title: "Addition Worksheet",
      content: "1 + 1 = 2",
      grade: "1st",
      tags: ["addition"],
    });
    const reply = await callTool("math_resource_search", {
      query: "Addition",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Found");
    expect(text).toContain("Addition Worksheet");
  });
  test("should search math resources by grade", async () => {
    await callTool("math_resource_save", {
      title: "Addition Worksheet",
      content: "1 + 1 = 2",
      grade: "1st",
      tags: ["addition"],
    });
    const reply = await callTool("math_resource_search", {
      query: "Addition",
      grade: "1st",
    });
    expect(reply.content[0].text).toContain("Found");
  });
  test("should handle search with no results", async () => {
    const reply = await callTool("math_resource_search", {
      query: "NonExistent",
    });
    expect(reply.content[0].text).toContain("No math resources found");
  });
});

View File

@@ -0,0 +1,61 @@
/**
 * Football tools tests
 *
 * The API key is faked, so each assertion only checks that the tool
 * produces some response (data or a graceful placeholder/error).
 */
// FIX: `afterEach` is used below but was not imported, which throws a
// ReferenceError at runtime under bun:test.
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerFootballTools } from "../../../../src/tools/hobbies/football.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { setTestEnv } from "../../../helpers/test-utils.js";
describe("Football Tools", () => {
  let cleanupEnv: () => void;
  beforeEach(() => {
    // Provide a fake API key so the tools consider themselves configured.
    cleanupEnv = setTestEnv({
      FOOTBALL_API_KEY: "test-key",
    });
    registerFootballTools();
  });
  afterEach(() => {
    cleanupEnv();
  });
  test("should get football matches", async () => {
    const result = await callTool("football_matches", {
      days: 7,
    });
    // Should return matches or placeholder message
    expect(result.content[0].text).toBeDefined();
    expect(result.content[0].text.length).toBeGreaterThan(0);
  });
  test("should get team information", async () => {
    const result = await callTool("football_team_info", {
      team: "Manchester United",
    });
    // Should return team info or placeholder
    expect(result.content[0].text).toBeDefined();
  });
  test("should get league standings", async () => {
    const result = await callTool("football_standings", {
      league: "Premier League",
    });
    // Should return standings or placeholder
    expect(result.content[0].text).toBeDefined();
  });
  test("should handle missing API key gracefully", async () => {
    // Drop the key; the tool must name the missing env var in its reply.
    cleanupEnv();
    cleanupEnv = setTestEnv({});
    registerFootballTools();
    const result = await callTool("football_matches", {});
    expect(result.content[0].text).toContain("FOOTBALL_API_KEY");
  });
});

View File

@@ -0,0 +1,100 @@
/**
 * Unit tests for the game tools: info/deals lookups (network-tolerant)
 * and wishlist add/list/remove against an isolated test database.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerGameTools } from "../../../../src/tools/hobbies/games.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Game Tools", () => {
  let workspace: ReturnType<typeof createTempDir>;
  let teardownDb: () => void;
  beforeEach(() => {
    workspace = createTempDir();
    teardownDb = setupTestDatabase(workspace);
    registerGameTools();
  });
  afterEach(() => {
    teardownDb();
    workspace.cleanup();
  });
  test("should get game information", async () => {
    const reply = await callTool("game_info", {
      name: "Minecraft",
    });
    // Should either return game info or handle API error gracefully
    expect(reply.content[0].text).toBeDefined();
  }, 10000); // Longer timeout for API calls
  test("should get game deals", async () => {
    const reply = await callTool("game_deals", {
      platform: "steam",
    });
    // Should either return deals or handle API error gracefully
    expect(reply.content[0].text).toBeDefined();
  }, 10000);
  test("should add game to wishlist", async () => {
    const reply = await callTool("game_wishlist", {
      action: "add",
      gameName: "Test Game",
      platform: "PC",
    });
    const text = reply.content[0].text;
    expect(text).toContain("added to wishlist");
    expect(text).toContain("Test Game");
  });
  test("should list game wishlist", async () => {
    // Seed the wishlist so the listing is non-empty.
    await callTool("game_wishlist", {
      action: "add",
      gameName: "Test Game",
      platform: "PC",
    });
    const reply = await callTool("game_wishlist", {
      action: "list",
    });
    expect(reply.content[0].text).toMatch(/wishlist|Test Game/i);
  });
  test("should remove game from wishlist", async () => {
    // Add a game, then recover its generated ID from the reply text.
    const added = await callTool("game_wishlist", {
      action: "add",
      gameName: "Test Game",
    });
    const found = added.content[0].text.match(/ID: ([a-f0-9-]+)/);
    if (!found) {
      throw new Error("Could not extract ID");
    }
    const removed = await callTool("game_wishlist", {
      action: "remove",
      id: found[1],
    });
    expect(removed.content[0].text).toContain("removed from wishlist");
  });
  test("should handle empty wishlist", async () => {
    const reply = await callTool("game_wishlist", {
      action: "list",
    });
    expect(reply.content[0].text).toMatch(/empty|Use game_wishlist.*add/i);
  });
});

View File

@@ -0,0 +1,71 @@
/**
 * Unit tests for the static code review / optimization tools.
 * These tools are pure text analyzers, so no database or temp dir is needed.
 */
import { describe, test, expect, beforeEach } from "bun:test";
import { registerCodeReviewTools } from "../../../../src/tools/programming/codeReview.js";
import { callTool } from "../../../helpers/tool-helper.js";
describe("Code Review Tools", () => {
  beforeEach(() => {
    registerCodeReviewTools();
  });
  test("should review code and find issues", async () => {
    const reply = await callTool("code_review", {
      code: "let x: any = 1;",
      language: "typescript",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Code Review");
    expect(text).toContain("any");
  });
  test("should suggest improvements", async () => {
    const reply = await callTool("code_review", {
      code: "console.log('test');",
      language: "javascript",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Suggestions");
    expect(text).toContain("console.log");
  });
  test("should detect var usage", async () => {
    const reply = await callTool("code_review", {
      code: "var x = 1;",
      language: "javascript",
    });
    expect(reply.content[0].text).toContain("var");
  });
  test("should provide optimization suggestions", async () => {
    const reply = await callTool("code_optimize", {
      code: "if (x == 1) { }",
      language: "javascript",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Optimization");
    expect(text).toContain("===");
  });
  test("should suggest Vue optimizations", async () => {
    const reply = await callTool("code_optimize", {
      code: "<div v-for='item in items'>",
      language: "vue",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Optimization");
    expect(text).toContain(":key");
  });
  test("should handle code with no issues", async () => {
    const reply = await callTool("code_review", {
      code: "const x: number = 1;",
      language: "typescript",
    });
    expect(reply.content[0].text).toContain("Code Review");
  });
});

View File

@@ -0,0 +1,100 @@
/**
 * Unit tests for the code snippet tools: save, search, list and delete.
 * Each case runs against an isolated temp directory and database.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerCodeSnippetTools } from "../../../../src/tools/programming/codeSnippet.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
import { setupTestDatabase } from "../../../helpers/database-helper.js";
describe("Code Snippet Tools", () => {
  let workspace: ReturnType<typeof createTempDir>;
  let teardownDb: () => void;
  beforeEach(() => {
    workspace = createTempDir();
    teardownDb = setupTestDatabase(workspace);
    registerCodeSnippetTools();
  });
  afterEach(() => {
    teardownDb();
    workspace.cleanup();
  });
  test("should save code snippet", async () => {
    const reply = await callTool("code_snippet_save", {
      title: "Test Snippet",
      code: "const x = 1;",
      language: "typescript",
      tags: ["test"],
    });
    const text = reply.content[0].text;
    expect(text).toContain("saved successfully");
    expect(text).toContain("Test Snippet");
  });
  test("should search code snippets", async () => {
    // Seed one snippet so the search has something to hit.
    await callTool("code_snippet_save", {
      title: "Test Snippet",
      code: "const x = 1;",
      language: "typescript",
      tags: ["test"],
    });
    const reply = await callTool("code_snippet_search", {
      query: "Test",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Found");
    expect(text).toContain("Test Snippet");
  });
  test("should list code snippets", async () => {
    // Seed one snippet so the listing is non-empty.
    await callTool("code_snippet_save", {
      title: "Test Snippet",
      code: "const x = 1;",
      language: "typescript",
      tags: ["test"],
    });
    const reply = await callTool("code_snippet_list", {});
    const text = reply.content[0].text;
    expect(text).toContain("Total");
    expect(text).toContain("Test Snippet");
  });
  test("should delete code snippet", async () => {
    // Save a snippet, then recover its generated ID from the reply text.
    const saved = await callTool("code_snippet_save", {
      title: "Test Snippet",
      code: "const x = 1;",
      language: "typescript",
      tags: ["test"],
    });
    const found = saved.content[0].text.match(/ID: ([a-f0-9-]+)/);
    if (!found) {
      throw new Error("Could not extract ID from save result");
    }
    const removed = await callTool("code_snippet_delete", { id: found[1] });
    expect(removed.content[0].text).toContain("deleted successfully");
  });
  test("should handle search with no results", async () => {
    const reply = await callTool("code_snippet_search", {
      query: "NonExistent",
    });
    expect(reply.content[0].text).toContain("No code snippets found");
  });
});

View File

@@ -0,0 +1,61 @@
/**
 * Unit tests for the documentation lookup tools (TypeScript / Vue3 / Bun).
 * Each tool is exercised both without and with an explicit topic.
 */
import { describe, test, expect, beforeEach } from "bun:test";
import { registerDocsTools } from "../../../../src/tools/programming/docs.js";
import { callTool } from "../../../helpers/tool-helper.js";
describe("Documentation Tools", () => {
  beforeEach(() => {
    registerDocsTools();
  });
  test("should get TypeScript documentation", async () => {
    const reply = await callTool("docs_typescript", {});
    const text = reply.content[0].text;
    expect(text).toContain("TypeScript Documentation");
    expect(text).toContain("typescriptlang.org");
  });
  test("should get TypeScript documentation with topic", async () => {
    const reply = await callTool("docs_typescript", {
      topic: "generics",
    });
    const text = reply.content[0].text;
    expect(text).toContain("TypeScript Documentation");
    expect(text).toContain("generics");
  });
  test("should get Vue3 documentation", async () => {
    const reply = await callTool("docs_vue3", {});
    const text = reply.content[0].text;
    expect(text).toContain("Vue 3 Documentation");
    expect(text).toContain("vuejs.org");
  });
  test("should get Vue3 documentation with topic", async () => {
    const reply = await callTool("docs_vue3", {
      topic: "composition",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Vue 3 Documentation");
    expect(text).toContain("composition");
  });
  test("should get Bun documentation", async () => {
    const reply = await callTool("docs_bun", {});
    const text = reply.content[0].text;
    expect(text).toContain("Bun Documentation");
    expect(text).toContain("bun.sh");
  });
  test("should get Bun documentation with topic", async () => {
    const reply = await callTool("docs_bun", {
      topic: "runtime",
    });
    const text = reply.content[0].text;
    expect(text).toContain("Bun Documentation");
    expect(text).toContain("runtime");
  });
});

View File

@@ -0,0 +1,98 @@
/**
 * Project template tools tests
 *
 * Scaffolds projects into a throwaway temp directory and verifies the
 * generated file layout on disk.
 */
import { describe, test, expect, beforeEach, afterEach } from "bun:test";
import { registerProjectTemplateTools } from "../../../../src/tools/programming/projectTemplate.js";
import { callTool } from "../../../helpers/tool-helper.js";
import { createTempDir } from "../../../helpers/test-utils.js";
// FIX: `mkdirSync` was awkwardly pulled in via a dynamic `import("fs")`
// inside a test even though "fs" is already statically imported here.
import { existsSync, mkdirSync, readFileSync } from "fs";
import { join } from "path";
describe("Project Template Tools", () => {
  let testContext: ReturnType<typeof createTempDir>;
  beforeEach(() => {
    testContext = createTempDir();
    registerProjectTemplateTools();
  });
  afterEach(() => {
    testContext.cleanup();
  });
  test("should create Vite + Vue3 project", async () => {
    const projectName = "test-vue-project";
    const projectPath = join(testContext.tempDir, projectName);
    const result = await callTool("project_template_create", {
      name: projectName,
      path: testContext.tempDir,
    });
    expect(result.content[0].text).toContain("created successfully");
    // Spot-check the key scaffold files exist on disk.
    expect(existsSync(join(projectPath, "package.json"))).toBe(true);
    expect(existsSync(join(projectPath, "vite.config.ts"))).toBe(true);
    expect(existsSync(join(projectPath, "src", "main.ts"))).toBe(true);
    expect(existsSync(join(projectPath, "src", "App.vue"))).toBe(true);
  });
  test("should create project with Pinia", async () => {
    const projectName = "test-vue-pinia";
    const projectPath = join(testContext.tempDir, projectName);
    await callTool("project_template_create", {
      name: projectName,
      path: testContext.tempDir,
      usePinia: true,
    });
    // The generated package.json must declare pinia as a dependency.
    const packageJson = JSON.parse(
      readFileSync(join(projectPath, "package.json"), "utf-8")
    );
    expect(packageJson.dependencies.pinia).toBeDefined();
  });
  test("should create fullstack project", async () => {
    const projectName = "test-fullstack";
    const projectPath = join(testContext.tempDir, projectName);
    const result = await callTool("project_template_create_fullstack", {
      name: projectName,
      path: testContext.tempDir,
    });
    expect(result.content[0].text).toContain("created successfully");
    expect(existsSync(join(projectPath, "frontend", "package.json"))).toBe(
      true
    );
    expect(existsSync(join(projectPath, "backend", "package.json"))).toBe(true);
    expect(existsSync(join(projectPath, "backend", "src", "index.ts"))).toBe(
      true
    );
  });
  test("should list available templates", async () => {
    const result = await callTool("project_template_list", {});
    expect(result.content[0].text).toContain("Available project templates");
    expect(result.content[0].text).toContain("Vite + Vue3");
    expect(result.content[0].text).toContain("Fullstack");
  });
  test("should handle existing directory error", async () => {
    const projectName = "existing-project";
    const projectPath = join(testContext.tempDir, projectName);
    // Pre-create the target directory so the tool must report a conflict.
    mkdirSync(projectPath, { recursive: true });
    const result = await callTool("project_template_create", {
      name: projectName,
      path: testContext.tempDir,
    });
    expect(result.content[0].text).toContain("already exists");
  });
});

View File

@@ -4,7 +4,7 @@
"module": "ESNext",
"lib": ["ES2022"],
"moduleResolution": "bundler",
"types": ["bun-types", "node"],
"types": ["node"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
@@ -19,7 +19,6 @@
"noImplicitReturns": true,
"noFallthroughCasesInSwitch": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
"include": ["src/**/*", "types/**/*"],
"exclude": ["node_modules", "dist", "tests"]
}

9
tsconfig.test.json Normal file
View File

@@ -0,0 +1,9 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"types": ["node", "bun-types"],
"noEmit": true
},
"include": ["tests/**/*", "src/**/*"]
}

59
types/bun-test.d.ts vendored Normal file
View File

@@ -0,0 +1,59 @@
/*
 * @Date: 2026-01-07 15:24:14
 * @LastEditors: 陈子健
 * @LastEditTime: 2026-01-07 15:26:23
 * @FilePath: /cloud-mcp/types/bun-test.d.ts
 */
/**
 * Type declarations for Bun test framework
 * This file provides type definitions for Bun's built-in test framework
 * Place this file in a types directory and include it in tsconfig.json
 *
 * NOTE(review): this shim only declares the APIs used by this repo's
 * tests; Bun's real `bun:test` module exposes more (mock, spyOn, etc.).
 */
declare module "bun:test" {
  // Groups related test cases; `fn` may register nested describe/test calls.
  export function describe(name: string, fn: () => void | Promise<void>): void;
  // Registers one test case; optional `timeout` — presumably milliseconds,
  // matching the numeric third argument used in this repo's tests (TODO confirm
  // against Bun docs).
  export function test(
    name: string,
    fn: () => void | Promise<void>,
    timeout?: number
  ): void;
  // Alias of `test` with the same signature.
  export function it(
    name: string,
    fn: () => void | Promise<void>,
    timeout?: number
  ): void;
  // Returns a matcher object for the given value. Only the matchers used in
  // this repo are declared; loosely typed as `any` since this is a shim.
  export function expect(actual: any): {
    toBe(expected: any): void;
    toBeDefined(): void;
    toBeUndefined(): void;
    toBeNull(): void;
    toBeTruthy(): void;
    toBeFalsy(): void;
    toEqual(expected: any): void;
    toContain(expected: any): void;
    toMatch(pattern: string | RegExp): void;
    toThrow(error?: string | RegExp | Error): void;
    toBeGreaterThan(expected: number): void;
    toBeLessThan(expected: number): void;
    toBeGreaterThanOrEqual(expected: number): void;
    toBeLessThanOrEqual(expected: number): void;
    toBeCloseTo(expected: number, precision?: number): void;
    toBeInstanceOf(expected: any): void;
    // Negated matchers. NOTE(review): the numeric-comparison matchers above
    // are not mirrored here; add them if a test ever uses `not.toBeGreaterThan`.
    not: {
      toBe(expected: any): void;
      toBeDefined(): void;
      toBeUndefined(): void;
      toBeNull(): void;
      toBeTruthy(): void;
      toBeFalsy(): void;
      toEqual(expected: any): void;
      toContain(expected: any): void;
      toMatch(pattern: string | RegExp): void;
      toThrow(error?: string | RegExp | Error): void;
    };
  };
  // Per-test and per-suite lifecycle hooks.
  export function beforeEach(fn: () => void | Promise<void>): void;
  export function afterEach(fn: () => void | Promise<void>): void;
  export function beforeAll(fn: () => void | Promise<void>): void;
  export function afterAll(fn: () => void | Promise<void>): void;
}