Refactor E2E tests and improve error handling in Orchestrator

- Fix `simple_test_analysis` template in E2E test setup to align with Orchestrator's data fetch logic.
- Implement and verify additional E2E scenarios:
    - Scenario C: Partial Provider Failure (verified error propagation fix in Orchestrator).
    - Scenario D: Invalid Symbol input.
    - Scenario E: Analysis Module failure.
- Update `WorkflowStateMachine::handle_report_failed` to correctly scope error broadcasting to the specific task instead of failing effectively silently or broadly.
- Update testing strategy documentation to reflect completed Phase 4 testing.
- Skip Scenario B (Orchestrator Restart) as persistence is not yet implemented (decision made to defer persistence).
This commit is contained in:
Lv, Qi 2025-11-21 20:44:32 +08:00
parent d28f3c5266
commit 0cb31e363e
181 changed files with 14498 additions and 4356 deletions

View File

@ -21,7 +21,10 @@ __pycache__
*.pyc *.pyc
# Large reference/resources not needed in images # Large reference/resources not needed in images
ref/ # ref/ is usually ignored, but we need service_kit_mirror for build context
# We use exclusion pattern (!) to allow specific subdirectories
ref/*
!ref/service_kit_mirror
archive/ archive/
docs/ docs/

File diff suppressed because one or more lines are too long

10
docker-compose.e2e.yml Normal file
View File

@ -0,0 +1,10 @@
services:
api-gateway:
ports:
- "4000:4000"
workflow-orchestrator-service:
ports:
- "8005:8005" # Expose for debugging if needed

51
docker-compose.test.yml Normal file
View File

@ -0,0 +1,51 @@
services:
postgres-test:
image: timescale/timescaledb:2.15.2-pg16
container_name: fundamental-postgres-test
command: -c shared_preload_libraries=timescaledb
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: fundamental_test
ports:
- "5433:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres -d fundamental_test"]
interval: 5s
timeout: 5s
retries: 10
networks:
- test-network
nats-test:
image: nats:2.9
container_name: fundamental-nats-test
ports:
- "4223:4222"
networks:
- test-network
data-persistence-test:
build:
context: .
dockerfile: services/data-persistence-service/Dockerfile
container_name: data-persistence-service-test
environment:
HOST: 0.0.0.0
PORT: 3000
# Connect to postgres-test using internal docker network alias
DATABASE_URL: postgresql://postgres:postgres@postgres-test:5432/fundamental_test
RUST_LOG: info
RUST_BACKTRACE: "1"
ports:
- "3001:3000"
depends_on:
postgres-test:
condition: service_healthy
networks:
- test-network
networks:
test-network:

View File

@ -27,6 +27,7 @@ services:
build: build:
context: . context: .
dockerfile: services/data-persistence-service/Dockerfile dockerfile: services/data-persistence-service/Dockerfile
# Override build context to ensure ignored files are included if needed, or rely on .dockerignore
container_name: data-persistence-service container_name: data-persistence-service
environment: environment:
HOST: 0.0.0.0 HOST: 0.0.0.0
@ -84,8 +85,6 @@ services:
NATS_ADDR: nats://nats:4222 NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1 DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
REPORT_GENERATOR_SERVICE_URL: http://report-generator-service:8004 REPORT_GENERATOR_SERVICE_URL: http://report-generator-service:8004
# provider_services via explicit JSON for deterministic parsing
PROVIDER_SERVICES: '["http://alphavantage-provider-service:8000", "http://tushare-provider-service:8001", "http://finnhub-provider-service:8002", "http://yfinance-provider-service:8003"]'
RUST_LOG: info,axum=info RUST_LOG: info,axum=info
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
depends_on: depends_on:
@ -113,6 +112,8 @@ services:
SERVER_PORT: 8000 SERVER_PORT: 8000
NATS_ADDR: nats://nats:4222 NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1 DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
API_GATEWAY_URL: http://api-gateway:4000
SERVICE_HOST: alphavantage-provider-service
RUST_LOG: info,axum=info RUST_LOG: info,axum=info
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
depends_on: depends_on:
@ -136,6 +137,8 @@ services:
NATS_ADDR: nats://nats:4222 NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1 DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
TUSHARE_API_URL: http://api.waditu.com TUSHARE_API_URL: http://api.waditu.com
API_GATEWAY_URL: http://api-gateway:4000
SERVICE_HOST: tushare-provider-service
RUST_LOG: info,axum=info RUST_LOG: info,axum=info
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
depends_on: depends_on:
@ -159,6 +162,8 @@ services:
NATS_ADDR: nats://nats:4222 NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1 DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
FINNHUB_API_URL: https://finnhub.io/api/v1 FINNHUB_API_URL: https://finnhub.io/api/v1
API_GATEWAY_URL: http://api-gateway:4000
SERVICE_HOST: finnhub-provider-service
RUST_LOG: info,axum=info RUST_LOG: info,axum=info
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
depends_on: depends_on:
@ -181,6 +186,8 @@ services:
SERVER_PORT: 8003 SERVER_PORT: 8003
NATS_ADDR: nats://nats:4222 NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1 DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
API_GATEWAY_URL: http://api-gateway:4000
SERVICE_HOST: yfinance-provider-service
RUST_LOG: info,axum=info RUST_LOG: info,axum=info
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
depends_on: depends_on:
@ -219,6 +226,28 @@ services:
timeout: 5s timeout: 5s
retries: 12 retries: 12
workflow-orchestrator-service:
build:
context: .
dockerfile: services/workflow-orchestrator-service/Dockerfile
container_name: workflow-orchestrator-service
environment:
SERVER_PORT: 8005
NATS_ADDR: nats://nats:4222
DATA_PERSISTENCE_SERVICE_URL: http://data-persistence-service:3000/api/v1
RUST_LOG: info
RUST_BACKTRACE: "1"
depends_on:
- nats
- data-persistence-service
networks:
- app-network
healthcheck:
test: ["CMD-SHELL", "curl -fsS http://localhost:8005/health >/dev/null || exit 1"]
interval: 5s
timeout: 5s
retries: 12
# ================================================================= # =================================================================
# Python Services (Legacy - to be replaced) # Python Services (Legacy - to be replaced)
# ================================================================= # =================================================================

View File

@ -0,0 +1,193 @@
# 前端报告页面重构设计文档 (Frontend Refactoring Design Doc)
**日期**: 2025-11-19
**状态**: 待评审 (Draft)
**目标**: 重构 `app/report/[symbol]` 页面,消除历史技术债务,严格对齐 V2 后端微服务架构。
## 1. 核心原则
1. **单一数据源 (SSOT)**: 前端不再维护任务进度、依赖关系或倒计时。所有状态严格来自后端 API (`/api/tasks/{id}`, `/api/analysis-results`).
2. **无隐式逻辑 (No Implicit Logic)**: 严格按照用户选择的 Template ID 渲染,后端未返回的数据即视为不存在,不进行客户端推断或 Fallback。
3. **真·流式传输 (True Streaming)**: 废弃数据库轮询方案。采用 **Server-Sent Events (SSE)** 技术。
* 后端在内存中维护 `tokio::sync::broadcast` 通道。
* LLM 生成的 Token 实时推送到通道,直达前端。
* 数据库只负责存储**最终完成**的分析结果 (Persistence),不参与流式传输过程。
## 2. 页面布局设计
页面采用“固定框架 + 动态内容”的布局模式。
```text
+-----------------------------------------------------------------------+
| [Header Area] |
| Symbol: AAPL | Market: US | Price: $230.5 (Snapshot) | [Status Badge]|
| Control: [ Template Select Dropdown [v] ] [ Trigger Analysis Button ]|
+-----------------------------------------------------------------------+
| |
| [ Tab Navigation Bar ] |
| +-----------+ +--------------+ +------------+ +------------+ +-----+ |
| | 股价图表 | | 基本面数据 | | 分析模块A | | 分析模块B | | ... | |
| +-----------+ +--------------+ +------------+ +------------+ +-----+ |
| | |
+-----------------------------------------------------------------------+
| [ Main Content Area ] |
| |
| (Content changes based on selected Tab) |
| |
| SCENARIO 1: Stock Chart Tab |
| +-------------------------------------------------+ |
| | [ PLACEHOLDER: TradingView / K-Line Chart ] | |
| | (Future: Connect to Time-Series DB) | |
| +-------------------------------------------------+ |
| |
| SCENARIO 2: Fundamental Data Tab |
| +-------------------------------------------------+ |
| | Status: Waiting for Providers (2/3)... | |
| | --------------------------------------------- | |
| | [Tushare]: OK (JSON/Table Dump) | |
| | [Finnhub]: OK (JSON/Table Dump) | |
| | [AlphaV ]: Pending... | |
| +-------------------------------------------------+ |
| |
| SCENARIO 3: Analysis Module Tab (e.g., Valuation) |
| +-------------------------------------------------+ |
| | [Markdown Renderer] | |
| | ## Valuation Analysis | |
| | Based on the PE ratio of 30... | |
| | (Streaming Cursor) _ | |
| +-------------------------------------------------+ |
| |
+-----------------------------------------------------------------------+
| [ Execution Details Footer / Tab ] |
| Total Time: 12s | Tokens: 4050 | Cost: $0.02 |
+-----------------------------------------------------------------------+
```
## 3. 数据流与状态机
### 3.1 固定 Tab 定义
无论选择何种模板,以下 Tab 始终存在(Fixed Tabs):
1. **股价图表 (Stock Chart)**
* **数据源**: 独立的实时行情 API / 时间序列数据库。
* **当前实现**: 占位符 (Placeholder)。
2. **基本面数据 (Fundamental Data)**
* **定义**: 所有已启用的 Data Providers 返回的原始数据聚合。
* **状态逻辑**:
* 此 Tab 代表“数据准备阶段”。
* 必须等待后端 `FetchCompanyDataCommand` 对应的 Task 状态为 Completed/Partial/Failed。
* UI 展示所有 Provider 的回执。只有当所有 Provider 都有定论(成功或失败),此阶段才算结束。
* **作为后续分析的“门控”**: 此阶段未完成前,后续分析 Tab 处于“等待中”状态。
3. **执行详情 (Execution Details)**
* **定义**: 工作流的元数据汇总。
* **内容**: 耗时统计、Token 消耗、API 调用清单。
### 3.2 动态 Tab 定义 (Analysis Modules)
* **来源**: 根据当前选中的 `Template ID` 从后端获取 `AnalysisTemplateConfig`
* **生成逻辑**:
* Template 中定义了 Modules: `[Module A, Module B, Module C]`.
* 前端直接映射为 Tab A, Tab B, Tab C。
* **渲染**:
* **Loading**: 后端 `AnalysisResult` 状态为 `processing`
* **Streaming**: 通过 SSE (`/api/analysis-results/stream`) 接收增量内容。
* **Done**: 后端流结束,或直接从 DB 读取完整内容。
### 3.3 状态机 (useReportEngine Hook)
我们将废弃旧的 Hook实现一个纯粹的 `useReportEngine`
```typescript
interface ReportState {
// 1. 配置上下文
symbol: string;
templateId: string;
templateConfig: AnalysisTemplateSet | null; // 用于生成动态 Tab
// 2. 阶段状态
fetchStatus: 'idle' | 'fetching' | 'complete' | 'error'; // 基本面数据阶段
analysisStatus: 'idle' | 'running' | 'complete'; // 分析阶段
// 3. 数据持有
fundamentalData: any[]; // 来自各个 Provider 的原始数据
analysisResults: Record<string, AnalysisResultDto>; // Key: ModuleID
// 4. 进度
executionMeta: {
startTime: number;
elapsed: number;
tokens: number;
}
}
```
## 4. 交互流程
1. **初始化**:
* 用户进入页面 -> 加载 `api/configs/analysis_template_sets` -> 填充下拉框。
* 如果 URL 或历史数据中有 `template_id`,自动选中。
2. **触发 (Trigger)**:
* 用户点击“开始分析”。
* 前端 POST `/api/data-requests` (payload: `{ symbol, template_id }`)。
* **前端重置所有动态 Tab 内容为空**。
* 进入 `fetchStatus: fetching`
3. **阶段一:基本面数据获取**:
* 前端轮询 `/api/tasks/{request_id}`
* **基本面 Tab** 高亮/显示 Spinner。
* 展示各个 Provider 的子任务进度。
* 当 Task 状态 = Completed -> 进入阶段二。
4. **阶段二:流式分析 (SSE)**:
* 前端建立 EventSource 连接 `/api/analysis-results/stream?request_id={id}`
* **智能切换 Tab**: (可选) 当某个 Module 开始生成 (收到 SSE 事件 `module_start`) 时,UI 可以自动切换到该 Tab。
* **渲染**: 收到 `content` 事件,追加到对应 Module 的内容中。
* **持久化**: 只有当 SSE 收到 `DONE` 事件时,后端才保证数据已落库。
5. **完成**:
* SSE 连接关闭。
* 状态转为 `complete`
## 5. 架构设计 (Architecture Design)
为了实现真流式传输,后端架构调整如下:
1. **内存状态管理 (In-Memory State)**:
* `AppState` 中增加 `stream_manager: StreamManager`
* `StreamManager` 维护 `HashMap<RequestId, BroadcastSender<StreamEvent>>`
* 这消除了对数据库的中间状态写入压力。
2. **Worker 职责**:
* Worker 执行 LLM 请求。
* 收到 Token -> 写入 `BroadcastSender` (Fire and forget)。
* 同时将 Token 累积在内存 Buffer 中。
* 生成结束 -> 将完整 Buffer 写入数据库 (PostgreSQL) -> 广播 `ModuleDone` 事件。
3. **API 职责**:
* `GET /stream`:
* 检查内存中是否有对应的 `BroadcastSender`?
* **有**: 建立 SSE 连接,订阅并转发事件。
* **无**: 检查数据库是否已完成?
* **已完成**: 一次性返回完整内容 (模拟 SSE 或直接返回 JSON)。
* **未开始/不存在**: 返回 404 或等待。
## 6. 迁移计划 (Action Items)
### 6.1 清理与归档 (Cleanup)
- [x] 创建 `frontend/archive/v1_report` 目录。
- [x] 移动 `app/report/[symbol]/components` 下的旧组件(`ExecutionDetails.tsx`, `TaskStatus.tsx`, `ReportHeader.tsx`, `AnalysisContent.tsx`)到 archive。
- [x] 移动 `app/report/[symbol]/hooks` 下的 `useAnalysisRunner.ts``useReportData.ts` 到 archive。
### 6.2 核心构建 (Core Scaffolding)
- [x] 创建 `hooks/useReportEngine.ts`: 实现上述状态机,严格对接后端 API。
- [x] 创建 `components/ReportLayout.tsx`: 实现新的布局框架Header + Tabs + Content
- [x] 创建 `components/RawDataViewer.tsx`: 用于展示基本面原始数据JSON View
- [x] 创建 `components/AnalysisViewer.tsx`: 用于展示分析结果Markdown Streaming
### 6.3 页面集成 (Integration)
- [x] 重写 `app/report/[symbol]/page.tsx`: 引入 `useReportEngine` 和新组件。
- [ ] 验证全流程Trigger -> Task Fetching -> Analysis Streaming -> Finish。
### 6.4 后端重构 (Backend Refactoring) - NEW
- [x] **State Upgrade**: 更新 `AppState` 引入 `tokio::sync::broadcast` 用于流式广播。
- [x] **Worker Update**: 修改 `run_report_generation_workflow`,不再生成完才写库,也不中间写库,而是**中间发广播,最后写库**。
- [x] **API Update**: 新增 `GET /api/analysis-results/stream` (SSE Endpoint),对接广播通道。
- [x] **Frontend Update**: 修改 `useReportEngine.ts`,将轮询 `analysis-results` 改为 `EventSource` 连接。

View File

@ -0,0 +1,148 @@
# 供应商隔离的数据新鲜度与缓存设计方案
## 1. 背景 (Background)
当前系统使用 `company_profiles` 表中的全局 `updated_at` 时间戳来判断某个股票的数据是否“新鲜”(例如:过去 24 小时内更新过)。
**现有问题:**
这种方法在多供应商Multi-Provider环境中会导致严重的竞态条件Race Condition
1. **Tushare**(A股数据源)通常响应较快,获取数据并更新了 `company_profiles` 表的 `updated_at`。
2. `updated_at` 时间戳被更新为 `NOW()`
3. **YFinance****AlphaVantage**(全球数据源)稍后启动任务。
4. 它们检查 `company_profiles` 表,发现 `updated_at` 非常新,因此错误地认为**自己的**数据也是最新的。
5. 结果:YFinance/AlphaVantage 跳过执行,导致这些特定字段的数据为空或陈旧。
## 2. 目标 (Objective)
实现一个**供应商隔离的缓存机制**,允许每个数据供应商(Tushare, YFinance, AlphaVantage, Finnhub)能够:
1. 独立追踪其最后一次成功更新数据的时间。
2. 仅根据**自己的**数据新鲜度来决定是否执行任务。
3. 避免干扰其他供应商的执行逻辑。
## 3. 设计原则 (Design Principles)
1. **不新增数据表**:利用数据库现有的文档-关系混合特性(Document-Relational)。具体来说,使用 `company_profiles` 表中的 `additional_info` (JSONB) 字段。
2. **服务层抽象**:解析和管理这些元数据的复杂性应封装在 `Data Persistence Service` 内部,向各 Provider Service 暴露简洁的 API。
3. **并发安全**:确保不同供应商的并发更新不会覆盖彼此的元数据状态。
## 4. 数据结构设计 (Data Structure Design)
我们将利用现有的 `company_profiles.additional_info` 字段(类型:`JSONB`)来存储一个供应商状态字典。
### `additional_info` JSON Schema 设计
```json
{
"provider_status": {
"tushare": {
"last_updated": "2025-11-19T10:00:00Z",
"data_version": "v1",
"status": "success"
},
"yfinance": {
"last_updated": "2025-11-18T09:30:00Z",
"status": "success"
},
"alphavantage": {
"last_updated": "2025-11-15T14:00:00Z",
"status": "partial_success" // 例如:触发了速率限制
}
},
"other_metadata": "..." // 保留其他现有元数据
}
```
## 5. 实施计划 (Implementation Plan)
### 5.1. 数据持久化服务更新 (Data Persistence Service)
我们需要扩展 `PersistenceClient` 及其底层 API以支持细粒度的元数据更新。
**新增/更新 API 端点:**
1. **`PUT /companies/{symbol}/providers/{provider_id}/status`** (新增)
* **目的**:原子更新特定供应商的状态,无需读取/写入完整的 profile。
* **实现**:使用 Postgres 的 `jsonb_set` 函数,直接更新 JSON 路径 `['provider_status', provider_id]`
* **Payload**:
```json
{
"last_updated": "2025-11-19T12:00:00Z",
"status": "success"
}
```
2. **`GET /companies/{symbol}/providers/{provider_id}/status`** (新增)
* **目的**:辅助接口,用于获取特定供应商的当前缓存状态。
### 5.2. 供应商服务工作流更新 (Provider Service)
每个 Provider Service例如 `yfinance-provider-service`)将修改其 `worker.rs` 中的逻辑:
**现有逻辑(有缺陷):**
```rust
let profile = client.get_company_profile(symbol).await?;
if profile.updated_at > 24h_ago { return; } // 全局检查
```
**新逻辑:**
```rust
// 1. 检查 Provider 专属缓存
let status = client.get_provider_status(symbol, "yfinance").await?;
if let Some(s) = status {
if s.last_updated > 24h_ago {
info!("YFinance 数据较新,跳过执行。");
return;
}
}
// 2. 获取并持久化数据
// ... fetch ...
client.upsert_company_profile(profile).await?; // 更新基本信息
client.batch_insert_financials(financials).await?;
// 3. 更新 Provider 状态
client.update_provider_status(symbol, "yfinance", ProviderStatus {
last_updated: Utc::now(),
status: "success"
}).await?;
```
## 6. 风险管理与迁移 (Risk Management & Migration)
* **竞态条件 (Race Conditions)**:通过在数据库层使用 `jsonb_set` 进行部分更新,我们避免了“读-改-写”的竞态条件,确保 Provider A 的更新不会覆盖 Provider B 同时写入的状态。
* **数据迁移 (Migration)**
* **策略****Lazy Migration (懒迁移)**。
* 现有数据中没有 `provider_status` 字段。代码将优雅地处理 `null` 或缺失键的情况(将其视为“陈旧/从未运行”,触发重新获取)。
* **无需**编写专门的 SQL 迁移脚本去清洗历史数据。旧数据会随着新的抓取任务运行而自动补充上状态信息。
* 如果必须清理,可以直接执行 `UPDATE company_profiles SET additional_info = additional_info - 'provider_status';` 来重置所有缓存状态。
## 7. 实施清单 (Implementation Checklist)
- [x] **Phase 1: Common Contracts & DTOs**
- [x] 在 `services/common-contracts/src/dtos.rs` 中定义 `ProviderStatusDto`.
- [x] **Phase 2: Data Persistence Service API**
- [x] 实现 DB 层逻辑: `get_provider_status` (读取 JSONB).
- [x] 实现 DB 层逻辑: `update_provider_status` (使用 `jsonb_set`).
- [x] 添加 API Handler: `GET /companies/{symbol}/providers/{provider_id}/status`.
- [x] 添加 API Handler: `PUT /companies/{symbol}/providers/{provider_id}/status`.
- [x] 注册路由并测试接口.
- [x] **Phase 3: Client Logic Update**
- [x] 更新各服务中的 `PersistenceClient` (如 `services/yfinance-provider-service/src/persistence.rs` 等),增加 `get_provider_status``update_provider_status` 方法.
- [x] **Phase 4: Provider Services Integration**
- [x] **Tushare Service**: 更新 `worker.rs`,集成新的缓存检查逻辑.
- [x] **YFinance Service**: 更新 `worker.rs`,集成新的缓存检查逻辑.
- [x] **AlphaVantage Service**: 更新 `worker.rs`,集成新的缓存检查逻辑.
- [x] **Finnhub Service**: 更新 `worker.rs`,集成新的缓存检查逻辑.
- [ ] **Phase 5: Verification (验证)**
- [ ] 运行 `scripts/test_data_fetch.py` 验证全流程.
- [ ] 验证不同 Provider 的状态互不干扰.
- [ ] **Phase 6: Caching Logic Abstraction (缓存逻辑抽象 - 智能客户端)**
- [ ] 将 `PersistenceClient` 迁移至 `services/common-contracts/src/persistence_client.rs`(或新建 `service-sdk` 库),消除重复代码。
- [ ] 在共享客户端中实现高层方法 `should_fetch_data(symbol, provider, ttl)`
- [ ] 重构所有 Provider Service 以使用共享的 `PersistenceClient`
- [ ] 验证所有 Provider 的缓存逻辑是否一致且无需手动实现。

View File

@ -0,0 +1,128 @@
# 报告生成优化与 UI 状态反馈改进设计文档
**状态**: Draft
**日期**: 2025-11-19
**涉及模块**: Report Generator Service (Backend), Frontend (UI)
## 1. 背景与问题分析
当前系统的报告生成流程存在两个主要痛点,导致用户体验不佳且生成内容质量低下:
1. **数据注入缺失 (Data Injection Gap)**:
* 后端在执行 Prompt 渲染时,`financial_data` 被硬编码为 `"..."`
* 大模型(LLM)缺乏上下文输入,导致输出“幻觉”内容(如自我介绍、复读指令或通用废话)。
* 依赖链条虽然在拓扑排序上是正确的,但由于上游(如“基本面分析”)输出无效内容,下游(如“最终结论”)的输入也随之失效。
2. **UI 状态反馈缺失 (UI/UX Gap)**:
* 前端仅有简单的“有数据/无数据”判断。
* 点击“重新生成”时UI 往往显示旧的缓存数据,缺乏“生成中”或“进度更新”的实时反馈。
* 用户无法区分“旧报告”和“正在生成的新报告”。
## 2. 后端优化设计 (Report Generator Service)
### 2.1 数据注入逻辑修复 (Fixing Financial Data Injection)
我们将把当前的“基本面数据获取”视为一个**内置的基础工具Native Tool**。
* **当前逻辑**: 直接透传数据库 Raw Data。
* **改进逻辑**: 在 `worker.rs` 中实现一个数据格式化器,将 `Vec<TimeSeriesFinancialDto>` 转换为 LLM 易读的 Markdown 表格或结构化文本。
**实现细节**:
1. **格式化函数**: 实现 `format_financials_to_markdown(financials: &[TimeSeriesFinancialDto]) -> String`
* 按年份/季度降序排列。
* 提取关键指标(营收、净利润、ROE、毛利率等)。
* 生成 Markdown Table。
2. **注入 Context**:
* 在 `Tera` 模板渲染前,调用上述函数。
* 替换占位符: `context.insert("financial_data", &formatted_data);`
3. **上游依赖注入 (保持不变)**:
* 继续保留现有的 `generated_results` 注入逻辑,确保上游模块(如 `market_analysis`)的输出能正确传递给下游(如 `final_conclusion`)。
### 2.2 执行状态管理 (Execution Status Management)
为了支持前端的“实时状态”,后端需要能够区分“排队中”、“生成中”和“已完成”。
* **现状**: 只有生成完成后才写入 `analysis_results` 表。
* **改进**: 引入任务状态流转。
**方案 A (基于数据库 - 推荐 MVP)**:
利用现有的 `analysis_results` 表或新建 `analysis_tasks` 表。
1. **任务开始时**:
* Worker 开始处理某个 `module_id` 时,立即写入/更新一条记录。
* `status`: `PROCESSING`
* `content`: 空或 "Analysis in progress..."
2. **任务完成时**:
* 更新记录。
* `status`: `COMPLETED`
* `content`: 实际生成的 Markdown。
3. **任务失败时**:
* `status`: `FAILED`
* `content`: 错误信息。
### 2.3 未来扩展性:工具模块 (Future Tool Module)
* 当前设计中,`financial_data` 是硬编码注入的。
* **未来规划**: 在 Prompt 模板配置中,增加 `tools` 字段。
```json
"tools": ["financial_aggregator", "news_search", "calculator"]
```
* Worker 在渲染 Prompt 前,先解析 `tools` 配置,并行执行对应的工具函数(如 Python 数据清洗脚本),获取输出后注入 Context。当前修复的 `financial_data` 本质上就是 `financial_aggregator` 工具的默认实现。
## 3. 前端优化设计 (Frontend)
### 3.1 状态感知与交互
**目标**: 让用户清晰感知到“正在生成”。
1. **重新生成按钮行为**:
* 点击“重新生成”后,**立即**将当前模块的 UI 状态置为 `GENERATING`
* **视觉反馈**:
* 方案一(简单):清空旧内容,显示 Skeleton(骨架屏)+ 进度条/Spinner。
* 方案二(平滑):保留旧内容,但在上方覆盖一层半透明遮罩,并显示“正在更新分析...”。(推荐方案二,避免内容跳动)。
2. **状态轮询 (Polling)**:
* 由于后端暂未实现 SSE (Server-Sent Events),前端需采用轮询机制。
* 当状态为 `GENERATING` 时,每隔 2-3 秒调用一次 API 检查该 `module_id` 的状态。
* 当后端返回状态变更为 `COMPLETED` 时,停止轮询,刷新显示内容。
### 3.2 组件结构调整
修改 `AnalysisContent.tsx` 组件:
```typescript
interface AnalysisState {
status: 'idle' | 'loading' | 'success' | 'error';
data: string | null; // Markdown content
isStale: boolean; // 标记当前显示的是否为旧缓存
}
```
* **Idle**: 初始状态。
* **Loading**: 点击生成后,显示加载动画。
* **Success**: 获取到新数据。
* **IsStale**: 点击重新生成瞬间,将 `isStale` 设为 true。UI 上可以给旧文本加灰色滤镜,直到新数据到来。
## 4. 实施计划 (Action Plan)
### Phase 1: 后端数据修正 (Backend Core)
- [ ] 修改 `services/report-generator-service/src/worker.rs`
- [ ] 实现 `format_financial_data` 辅助函数。
- [ ] 将格式化后的数据注入 Tera Context。
- [ ] 验证大模型输出不再包含“幻觉”文本。
### Phase 2: 后端状态透出 (Backend API)
- [ ] 确认 `NewAnalysisResult` 或相关 DTO 是否支持状态字段。
- [ ] 在 Worker 开始处理模块时,写入 `PROCESSING` 状态到数据库。
- [ ] 确保 API 查询接口能返回 `status` 字段。
### Phase 3: 前端体验升级 (Frontend UI)
- [ ] 修改 `AnalysisContent.tsx`,增加对 `status` 字段的处理。
- [ ] 实现“重新生成”时的 UI 遮罩或 Loading 状态,不再单纯依赖 `useQuery` 的缓存。
- [ ] 优化 Markdown 渲染区的用户体验。
## 5. 验收标准 (Acceptance Criteria)
1. **内容质量**: 市场分析、基本面分析报告中包含具体的财务数字(如营收、利润),且引用正确,不再出现“请提供数据”的字样。
2. **流程闭环**: 点击“重新生成”后,UI 显示加载状态 -> 后端处理 -> UI 自动刷新为新内容。
3. **无闪烁**: 页面不会因为轮询而频繁闪烁,状态切换平滑。

View File

@ -0,0 +1,225 @@
# 架构重构设计文档:引入 Workflow Orchestrator
## 1. 背景与目标
当前系统存在 `api-gateway` 职责过载、业务逻辑分散、状态机隐式且脆弱、前后端状态不同步等核心问题。为了彻底解决这些架构痛点,本设计提出引入 **Workflow Orchestrator Service**,作为系统的“大脑”,负责集中管理业务流程、状态流转与事件协调。
### 核心目标
1. **解耦 (Decoupling)**: 将业务协调逻辑从 `api-gateway` 剥离Gateway 回归纯粹的流量入口和连接管理职责。
2. **状态一致性 (Consistency)**: 建立单一事实来源 (Single Source of Truth),所有业务状态由 Orchestrator 统一维护并广播。
3. **细粒度任务编排 (Fine-Grained Orchestration)**: 废除粗粒度的“阶段”概念,转向基于 DAG (有向无环图) 的任务编排。后端只负责执行任务和广播每个任务的状态,前端根据任务状态自由决定呈现逻辑。
## 2. 架构全景图 (Architecture Overview)
### 2.1 服务角色重定义
| 服务 | 现有职责 | **新职责** |
| :--- | :--- | :--- |
| **API Gateway** | 路由, 鉴权, 注册发现, 业务聚合, 流程触发 | 路由, 鉴权, 注册发现, **SSE/WS 代理 (Frontend Proxy)** |
| **Workflow Orchestrator** | *(新服务)* | **DAG 调度**, **任务依赖管理**, **事件广播**, **状态快照** |
| **Data Providers** | 数据抓取, 存库, 发 NATS 消息 | (保持不变) 接收指令 -> 干活 -> 发结果事件 |
| **Report Generator** | 报告生成, 发 NATS 消息 | (保持不变) 接收指令 -> 干活 -> 发进度/结果事件 |
| **Data Processors** | *(新服务类型)* | **数据清洗/转换** (接收上下文 -> 转换 -> 更新上下文) |
### 2.2 数据流向 (Data Flow)
1. **启动**: 前端 -> Gateway (`POST /start`) -> **Orchestrator** (NATS: `StartWorkflow`)
2. **调度**: **Orchestrator** 解析模板构建 DAG -> NATS: 触发无依赖的 Tasks (如 Data Fetching)
3. **反馈**: Executors (Providers/ReportGen/Processors) -> NATS: `TaskCompleted` -> **Orchestrator**
4. **流转**: **Orchestrator** 检查依赖 -> NATS: 触发下一层 Tasks
5. **广播**: **Orchestrator** -> NATS: `WorkflowEvent` (Task Status Updates) -> Gateway -> 前端 (SSE)
## 3. 接口与协议定义 (Contracts & Schemas)
需在 `services/common-contracts` 中进行以下调整:
### 3.1 新增 Commands (NATS Subject: `workflow.commands.*`)
```rust
// Topic: workflow.commands.start
#[derive(Serialize, Deserialize, Debug)]
pub struct StartWorkflowCommand {
pub request_id: Uuid,
pub symbol: CanonicalSymbol,
pub market: String,
pub template_id: String,
}
// 新增:用于手动请求状态对齐 (Reconnect Scenario)
// Topic: workflow.commands.sync_state
#[derive(Serialize, Deserialize, Debug)]
pub struct SyncStateCommand {
pub request_id: Uuid,
}
```
### 3.2 新增 Events (NATS Subject: `events.workflow.{request_id}`)
这是前端唯一需要订阅的流。
```rust
// Topic: events.workflow.{request_id}
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "type", content = "payload")]
pub enum WorkflowEvent {
// 1. 流程初始化 (携带完整的任务依赖图)
WorkflowStarted {
timestamp: i64,
// 定义所有任务及其依赖关系,前端可据此绘制流程图或进度条
task_graph: WorkflowDag
},
// 2. 任务状态变更 (核心事件)
TaskStateChanged {
task_id: String, // e.g., "fetch:tushare", "process:clean_financials", "module:swot_analysis"
task_type: TaskType, // DataFetch | DataProcessing | Analysis
status: TaskStatus, // Pending, Scheduled, Running, Completed, Failed, Skipped
message: Option<String>,
timestamp: i64
},
// 3. 任务流式输出 (用于 LLM 打字机效果)
TaskStreamUpdate {
task_id: String,
content_delta: String,
index: u32
},
// 4. 流程整体结束
WorkflowCompleted {
result_summary: serde_json::Value,
end_timestamp: i64
},
WorkflowFailed {
reason: String,
is_fatal: bool,
end_timestamp: i64
},
// 5. 状态快照 (用于重连/丢包恢复)
// 当前端重连或显式发送 SyncStateCommand 时,Orchestrator 发送此事件
WorkflowStateSnapshot {
timestamp: i64,
task_graph: WorkflowDag,
tasks_status: HashMap<String, TaskStatus>, // 当前所有任务的最新状态
tasks_output: HashMap<String, Option<String>> // (可选) 已完成任务的关键输出摘要
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct WorkflowDag {
pub nodes: Vec<TaskNode>,
pub edges: Vec<TaskDependency> // from -> to
}
#[derive(Serialize, Deserialize, Debug)]
pub struct TaskNode {
pub id: String,
pub name: String,
pub task_type: TaskType, // NOTE: `type` is a reserved keyword in Rust; use #[serde(rename = "type")] if the JSON field must be "type"
pub initial_status: TaskStatus
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub enum TaskType {
DataFetch, // 创造原始上下文
DataProcessing, // 消耗并转换上下文 (New)
Analysis // 读取上下文生成新内容
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
pub enum TaskStatus {
Pending, // 等待依赖
Scheduled, // 依赖满足,已下发给 Worker
Running, // Worker 正在执行
Completed, // 执行成功
Failed, // 执行失败
Skipped // 因上游失败或策略原因被跳过
}
```
### 3.3 调整现有 Messages
* **`FetchCompanyDataCommand`**: Publisher 变更为 `Workflow Orchestrator`
* **`GenerateReportCommand`**: Publisher 变更为 `Workflow Orchestrator`
## 4. Workflow Orchestrator 内部设计
### 4.1 DAG 调度器 (DAG Scheduler)
每个 `request_id` 对应一个 DAG 实例。
1. **初始化**: 根据 `TemplateID` 读取配置。
* 创建 Data Fetch Tasks (作为 DAG 的 Root Nodes)。
* 创建 Analysis Module Tasks (根据 `dependencies` 配置连接边)。
2. **依赖检查**:
* 监听 Task 状态变更。
* 当 Task A 变成 `Completed` -> 检查依赖 A 的 Task B。
* 如果 Task B 的所有依赖都 `Completed` -> 触发 Task B。
* 如果 Task A `Failed` -> 将依赖 A 的 Task B 标记为 `Skipped` (除非有容错策略)。
### 4.2 状态对齐机制 (State Alignment / Snapshot)
为了解决前端刷新或网络丢包导致的状态不一致:
1. **主动推送快照 (On Connect)**:
* Gateway 在前端建立 SSE 连接时,向 Orchestrator 发送 `SyncStateCommand`
* Orchestrator 收到命令后,将当前内存中的完整 DAG 状态打包成 `WorkflowStateSnapshot` 事件发送。
2. **前端合并逻辑**:
* 前端收到 Snapshot 后,全量替换本地的任务状态树。
* 如果 Snapshot 显示某任务 `Running`,前端恢复 Loading 动画。
* 如果 Snapshot 显示某任务 `Completed`,前端渲染结果。
### 4.3 容错策略 (Policy)
Orchestrator 需要内置策略来处理非二元结果。
* **Data Fetch Policy**: 并非所有 Data Fetch 必须成功。可以配置 "At least one data source" 策略。如果满足策略,Orchestrator 将下游的 Analysis Task 依赖视为满足。
## 5. 实施步骤 (Implementation Checklist)
### Phase 1: Contract & Interface
- [x] **Update common-contracts**:
- [x] Add `StartWorkflowCommand` and `SyncStateCommand`.
- [x] Add `WorkflowEvent` enum (incl. Started, StateChanged, StreamUpdate, Completed, Failed, Snapshot).
- [x] Add `WorkflowDag`, `TaskNode`, `TaskType`, `TaskStatus` structs.
- [x] Update publishers for `FetchCompanyDataCommand` and `GenerateReportCommand`.
- [x] Bump version and publish crate.
### Phase 2: Workflow Orchestrator Service (New)
- [x] **Scaffold Service**:
- [x] Create new Rust service `services/workflow-orchestrator-service`.
- [x] Setup `Dockerfile`, `Cargo.toml`, and `main.rs`.
- [x] Implement NATS connection and multi-topic subscription.
- [x] **Core Logic - State Machine**:
- [x] Implement `WorkflowState` struct (InMemory + Redis/DB persistence optional for MVP).
- [x] Implement `DagScheduler`: Logic to parse template and build dependency graph.
- [x] **Core Logic - Handlers**:
- [x] Handle `StartWorkflowCommand`: Init DAG, fire initial tasks.
- [x] Handle `TaskCompleted` events (from Providers/ReportGen): Update DAG, trigger next tasks.
- [x] Handle `SyncStateCommand`: Serialize current state and emit `WorkflowStateSnapshot`.
- [x] **Policy Engine**:
- [x] Implement "At least one provider" policy for data fetching.
### Phase 3: API Gateway Refactoring
- [x] **Remove Legacy Logic**:
- [x] Delete `aggregator.rs` completely.
- [x] Remove `trigger_data_fetch` aggregation logic.
- [x] Remove `/api/tasks` polling endpoint.
- [x] **Implement Proxy Logic**:
- [x] Add `POST /api/v2/workflow/start` -> Publishes `StartWorkflowCommand`.
- [x] Add `GET /api/v2/workflow/events/{id}` -> Subscribes to NATS, sends `SyncStateCommand` on open, proxies events to SSE.
### Phase 4: Integration & Frontend
- [x] **Docker Compose**: Add `workflow-orchestrator-service` to stack.
- [x] **Frontend Adapter**:
- [x] **Type Definitions**: Define `WorkflowEvent`, `WorkflowDag`, `TaskStatus` in `src/types/workflow.ts`.
- [x] **API Proxy**: Implement Next.js Route Handlers for `POST /workflow/start` and `GET /workflow/events/{id}` (SSE).
- [x] **Core Logic (`useWorkflow`)**:
- [x] Implement SSE connection management with auto-reconnect.
- [x] Handle `WorkflowStarted`, `TaskStreamUpdate`, `WorkflowCompleted`.
- [x] Implement state restoration via `WorkflowStateSnapshot`.
- [x] **UI Components**:
- [x] `WorkflowVisualizer`: Task list and status tracking.
- [x] `TaskOutputViewer`: Markdown-rendered stream output.
- [x] `WorkflowReportLayout`: Integrated analysis page layout.
- [x] **Page Integration**: Refactor `app/report/[symbol]/page.tsx` to use the new workflow engine.
---
*Updated: 2025-11-20 - Added Implementation Checklist*

View File

@ -0,0 +1,175 @@
# 架构修订:基于会话的数据快照与分层存储 (Session-Based Data Snapshotting)
## 1. 核心理念修订 (Core Philosophy Refinement)
基于您的反馈,我们修正了架构的核心逻辑,将数据明确划分为两类,并采取不同的存储策略。
### 1.1 数据分类 (Data Classification)
1. **客观历史数据 (Objective History / Time-Series)**
* **定义**: 股价、成交量、K线图等交易数据。
* **特性**: "出现即历史",不可篡改,全球唯一。
* **存储策略**: **全局共享存储**。不需要按 Session 隔离,不需要存多份。
* **表**: 现有的 `daily_market_data` (TimescaleDB) 保持不变。
2. **观测型数据 (Observational Data / Fundamentals)**
* **定义**: 财务报表、公司简介、以及 Provider 返回的原始非结构化或半结构化信息。
* **特性**: 不同来源Providers说法不一可能随时间修正Restatement分析依赖于“当时”获取的版本。
* **存储策略**: **基于 Session 的快照存储**。每一次 Session 都必须保存一份当时获取的原始数据的完整副本。
* **表**: 新增 `session_raw_data` 表。
### 1.2 解决的问题
* **会话隔离**: 新的 Session 拥有自己独立的一套基础面数据,不受历史 Session 干扰,也不污染未来 Session。
* **历史回溯**: 即使 Provider 变了,查看历史 Report 时,依然能看到当时是基于什么数据得出的结论。
* **数据清洗解耦**: 我们现在只负责“收集并快照”,不负责“清洗和聚合”。复杂的清洗逻辑(WASM/AI)留待后续模块处理。
---
## 2. 数据库架构设计 (Schema Design)
### 2.1 新增:会话原始数据表 (`session_raw_data`)
这是本次架构调整的核心。我们不再试图把财务数据强行塞进一个全局唯一的标准表,而是忠实记录每个 Provider 在该 Session 中返回的内容。
```sql
CREATE TABLE session_raw_data (
id BIGSERIAL PRIMARY KEY,
request_id UUID NOT NULL, -- 关联的 Session ID
symbol VARCHAR(32) NOT NULL,
provider VARCHAR(64) NOT NULL, -- e.g., 'tushare', 'alphavantage'
data_type VARCHAR(32) NOT NULL, -- e.g., 'financial_statements', 'company_profile'
-- 核心:直接存储 Provider 返回的(或稍微标准化的)完整 JSON
data_payload JSONB NOT NULL,
created_at TIMESTAMPTZ DEFAULT NOW(),
-- 索引:为了快速查询某次 Session 的数据
CONSTRAINT fk_request_id FOREIGN KEY (request_id) REFERENCES requests(id) ON DELETE CASCADE
);
CREATE INDEX idx_session_data_req ON session_raw_data(request_id);
```
### 2.2 新增:供应商缓存表 (`provider_response_cache`)
为了优化性能和节省 API 调用次数,我们在全局层引入缓存。但请注意:**缓存仅作为读取源,不作为 Session 的存储地。**
```sql
CREATE TABLE provider_response_cache (
cache_key VARCHAR(255) PRIMARY KEY, -- e.g., "tushare:AAPL:financials"
data_payload JSONB NOT NULL,
updated_at TIMESTAMPTZ DEFAULT NOW(),
expires_at TIMESTAMPTZ NOT NULL
);
```
### 2.3 保持不变:市场数据表 (`daily_market_data`)
* 继续使用 TimescaleDB 存储 `open`, `high`, `low`, `close`, `volume`
* 所有 Session 共享读取此表。
---
## 3. 数据流转逻辑 (Data Lifecycle)
### Phase 1: Session 启动与数据获取 (Acquisition)
1. **Start**: API Gateway 生成 `request_id`
2. **Fetch & Cache Logic (在 Provider Service 中执行)**:
* Provider 收到任务 (Symbol: AAPL)。
* **Check Cache**: 查询 `provider_response_cache`
* *Hit*: 拿出现成的 JSON。
* *Miss*: 调用外部 API,获得 JSON,写入 Cache(设置过期时间,如 24h)。
3. **Snapshot (关键步骤)**:
* Provider 将拿到的 JSON (无论来自 Cache 还是 API),作为一条**新记录**写入 `session_raw_data`
* 字段: `request_id=UUID`, `provider=tushare`, `data=JSON`
### Phase 2: 展示与分析 (Consumption)
1. **Frontend Raw View (UI)**:
* 前端调用 `GET /api/v1/session/{request_id}/raw-data`
* 后端 `SELECT * FROM session_raw_data WHERE request_id = ...`。
* UI 依然可以使用之前的 Accordion 结构,展示 "Tushare: Financials", "AlphaVantage: Profile"。这就是用户看到的“本次调查的原始底稿”。
2. **Analysis (LLM)**:
* Report Generator 获取 `request_id` 对应的所有 raw data。
* 将这些 Raw Data 作为 Context 喂给 LLM。
* (未来扩展): 在这一步之前,插入一个 "Data Cleaning Agent/Wasm",读取 raw data输出 clean data再喂给 LLM。
### Phase 3: 归档与清理 (Cleanup)
* **Session Deletion**: 当我们需要清理某个历史 Session 时,只需 `DELETE FROM session_raw_data WHERE request_id = ...`
* **副作用**: 零。因为 `daily_market_data` 是共享的(留着也没事),而 Session 独享的 `raw_data` 被彻底删除了。
---
## 4. 实施路线图 (Implementation Roadmap)
1. **Database Migration**:
* 创建 `session_raw_data` 表。
* 创建 `provider_response_cache` 表。
* (清理旧表): 废弃 `time_series_financials` 表(原计划用于存标准化的财务指标,现在确认不需要。我们只存 `session_raw_data` 中的原始基本面数据,财务报表由原始数据动态推导)。
* **保留** `daily_market_data`(存储股价、K线等客观时间序列数据,保持全局共享)。
2. **Provider Services**:
* 引入 Cache 检查逻辑。
* 修改输出逻辑:不再尝试 Upsert 全局表,而是 Insert `session_raw_data`
3. **Frontend Refactor**:
* 修改 `RawDataViewer` 的数据源,从读取“最后一次更新”改为读取“当前 Session 的 Raw Data”。
* 这完美解决了“刷新页面看到旧数据”的问题——如果是一个新 Session ID,它的 `session_raw_data` 一开始是空的,UI 就会显示为空/Loading,直到新的 Snapshot 写入。
4. **Future Extensibility (Aggregation)**:
* 当前架构下Frontend 直接展示 Raw Data。
* 未来:新增 `DataProcessorService`。它监听 "Data Fetched" 事件,读取 `session_raw_data`,执行聚合逻辑,将结果写入 `session_clean_data` (假想表),供 UI 显示“完美报表”。
---
## 5. Step-by-Step Task List
### Phase 1: Data Persistence Service & Database (Foundation)
- [x] **Task 1.1**: Create new SQL migration file.
- Define `session_raw_data` table (Columns: `id`, `request_id`, `symbol`, `provider`, `data_type`, `data_payload`, `created_at`).
- Define `provider_response_cache` table (Columns: `cache_key`, `data_payload`, `updated_at`, `expires_at`).
- (Optional) Rename `time_series_financials` to `_deprecated_time_series_financials` to prevent accidental usage.
- [x] **Task 1.2**: Run SQL migration (`sqlx migrate run`).
- [x] **Task 1.3**: Implement `db/session_data.rs` in Data Persistence Service.
- Function: `insert_session_data(pool, request_id, provider, data_type, payload)`.
- Function: `get_session_data(pool, request_id)`.
- [x] **Task 1.4**: Implement `db/provider_cache.rs` in Data Persistence Service.
- Function: `get_cache(pool, key) -> Option<Payload>`.
- Function: `set_cache(pool, key, payload, ttl)`.
- [x] **Task 1.5**: Expose new API endpoints in `api/`.
- `POST /api/v1/session-data` (Internal use by Providers).
- `GET /api/v1/session-data/:request_id` (Used by ReportGen & Frontend).
- `GET/POST /api/v1/provider-cache` (Internal use by Providers).
### Phase 2: Common Contracts & SDK (Glue Code)
- [x] **Task 2.1**: Update `common-contracts`.
- Add DTOs for `SessionData` and `CacheEntry`.
- Update `PersistenceClient` struct to include methods for calling new endpoints (`save_session_data`, `get_cache`, `set_cache`).
### Phase 3: Provider Services (Logic Update)
- [x] **Task 3.1**: Refactor `tushare-provider-service`.
- Update Worker to check Cache first.
- On Cache Miss: Call Tushare API -> Save to Cache.
- **Final Step**: Post data to `POST /api/v1/session-data` (instead of old batch insert).
- Ensure `request_id` is propagated correctly.
- [x] **Task 3.2**: Refactor `alphavantage-provider-service` (same logic).
- [x] **Task 3.3**: Refactor `yfinance-provider-service` (same logic).
- [x] **Task 3.4**: Verify `FinancialsPersistedEvent` is still emitted (or similar event) to trigger Gateway aggregation.
### Phase 4: API Gateway & Report Generator (Consumption)
- [x] **Task 4.1**: Update `api-gateway` routing.
- Proxy `GET /api/v1/session-data/:request_id` for Frontend.
- [x] **Task 4.2**: Update `report-generator-service`.
- In `worker.rs`, change data fetching logic.
- Instead of `get_financials_by_symbol`, call `get_session_data(request_id)`.
- Pass the raw JSON list to the LLM Context Builder.
### Phase 5: Frontend (UI Update)
- [x] **Task 5.1**: Update `useReportEngine.ts`.
- Change polling/fetching logic to request `GET /api/v1/session-data/${requestId}`.
- [x] **Task 5.2**: Update `RawDataViewer.tsx`.
- Adapt to new data structure (List of `{ provider, data_type, payload }`).
- Ensure the UI correctly groups these raw snapshots by Provider.

View File

@ -0,0 +1,110 @@
# 动态服务注册与发现机制设计方案 (Dynamic Service Registration & Discovery Proposal)
## 1. 问题陈述 (Problem Statement)
目前的 **API Gateway** 依赖于静态配置(环境变量中的 `provider_services` 映射表)来获知可用的数据提供商服务 (Data Provider Services)。
* **脆弱性 (Brittleness)**: 增加或迁移 Provider 需要修改 Gateway 配置并重启。
* **缺乏健康感知 (Lack of Health Awareness)**: Gateway 会盲目地尝试连接配置的 URL。如果某个服务挂了,但配置还在,请求会遭遇超时或连接错误。
* **运维复杂 (Operational Complexity)**: 手动管理 URL 既机械又容易出错。
## 2. 解决方案:动态注册系统 (Dynamic Registration System)
我们将实施**服务注册 (Service Registry)** 模式,由 API Gateway 充当注册中心。
### 2.1. "注册" 生命周期
1. **启动 (Startup)**: 当一个 Provider Service (例如 Tushare) 启动时,它向 API Gateway 发送 `POST /v1/registry/register` 请求。
* 载荷包括:服务 ID、基础 URL、能力标识(如 "tushare")。
2. **存活心跳 (Liveness/Heartbeat)**: Provider Service 运行一个后台任务,每隔 **N 秒** (建议 **10秒**) 发送一次 `POST /v1/registry/heartbeat`
* **注意**: 由于我们主要在本地容器网络运行,网络开销极低,我们可以使用较短的心跳周期(如 10 秒)来实现快速的故障检测。
3. **发现 (Discovery)**: API Gateway 在内存中维护活跃服务列表。
* 如果超过 **2 * N 秒** (如 20秒) 未收到心跳,该服务将被标记为“不健康”或被移除。
4. **关闭 (Shutdown)**: 在优雅退出 (Graceful Shutdown, SIGTERM/SIGINT) 时Provider 发送 `POST /v1/registry/deregister`
### 2.2. 架构变更
#### A. 共享契约 (`common-contracts`)
定义注册所需的数据结构。
```rust
// services/common-contracts/src/registry.rs
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ServiceRegistration {
pub service_id: String, // 唯一ID, 例如 "tushare-provider-1"
pub service_name: String, // 类型, 例如 "tushare"
pub base_url: String, // 例如 "http://10.0.1.5:8000"
pub health_check_url: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Heartbeat {
pub service_id: String,
pub status: ServiceStatus, // Active, Degraded
}
```
#### B. API Gateway (`api-gateway`)
* **新组件**: `ServiceRegistry` (带 TTL 的线程安全 Map)。
* **新接口**:
* `POST /v1/registry/register`: 添加/更新条目。
* `POST /v1/registry/heartbeat`: 刷新 TTL。
* `POST /v1/registry/deregister`: 移除条目。
* **逻辑变更**: `get_task_progress``trigger_data_fetch` 将不再读取静态配置,而是查询动态的 `ServiceRegistry`
#### C. Provider Services (`*-provider-service`)
我们需要一个统一的机制来处理这个生命周期。
建议在 `common-contracts` 中引入一个标准的生命周期处理模块。
**建议的 Trait / 辅助结构体:**
```rust
// services/common-contracts/src/lifecycle.rs (New)
pub struct ServiceRegistrar {
gateway_url: String,
registration: ServiceRegistration,
// ...
}
impl ServiceRegistrar {
/// 注册服务 (重试直到成功)
pub async fn register(&self) -> Result<()>;
/// 启动后台心跳循环 (10s 间隔)
pub async fn start_heartbeat_loop(&self);
/// 注销服务
pub async fn deregister(&self) -> Result<()>;
}
```
## 3. 实施计划 (TODO List)
### Phase 1: 基础建设 (Infrastructure)
* [ ] **Task 1.1 (Contracts)**: 在 `services/common-contracts` 中创建 `registry.rs`,定义 `ServiceRegistration``Heartbeat` 结构体。
* [ ] **Task 1.2 (Library)**: 在 `services/common-contracts` 中实现 `ServiceRegistrar` 逻辑。
* 包含重试机制的 `register`
* 包含 `tokio::time::interval` (10s) 的 `start_heartbeat_loop`
* 确保能从环境变量 (如 `API_GATEWAY_URL`) 获取 Gateway 地址。
* [ ] **Task 1.3 (Gateway Core)**: 在 `api-gateway` 中实现 `ServiceRegistry` 状态管理(使用 `Arc<RwLock<HashMap<...>>>`)。
* [ ] **Task 1.4 (Gateway API)**: 在 `api-gateway` 中添加 `/v1/registry/*` 路由并挂载 Handler。
### Phase 2: Provider 改造 (Provider Migration)
*由于所有 Provider 架构一致,以下步骤需在 `tushare`, `finnhub`, `alphavantage`, `yfinance` 四个服务中重复执行:*
* [ ] **Task 2.1 (Config)**: 更新 `AppConfig`,增加 `gateway_url` 配置项。
* [ ] **Task 2.2 (Main Loop)**: 修改 `main.rs`
* 初始化 `ServiceRegistrar`
* 在 HTTP Server 启动前(或同时)调用 `registrar.register().await`
* 使用 `tokio::spawn` 启动 `registrar.start_heartbeat_loop()`
* [ ] **Task 2.3 (Shutdown)**: 添加 Graceful Shutdown 钩子,确保在收到 Ctrl+C 时调用 `registrar.deregister()`
### Phase 3: 消费端适配 (Gateway Consumption)
* [ ] **Task 3.1**: 修改 `api-gateway``test_data_source_config`,不再查 Config改为查 Registry。
* [ ] **Task 3.2**: 修改 `api-gateway``trigger_data_fetch`,根据 `service_name` (如 "tushare") 从 Registry 查找可用的 `base_url`
* 如果找到多个同名服务,可以做简单的 Load Balance轮询
* [ ] **Task 3.3**: 修改 `api-gateway``get_task_progress`,遍历 Registry 中的所有服务来聚合状态。
### Phase 4: 清理 (Cleanup)
* [ ] **Task 4.1**: 移除 `api-gateway` 中关于 `provider_services` 的静态配置代码和环境变量。
## 4. 预期收益
* **即插即用 (Plug-and-Play)**: 启动一个新的 Provider 实例,它会自动出现在系统中。
* **自愈 (Self-Healing)**: 如果 Provider 崩溃它会从注册表中消失TTL 过期Gateway 不会再向其发送请求,避免了无意义的等待和超时。
* **零配置 (Zero-Config)**: 扩容或迁移 Provider 时无需修改 Gateway 环境变量。

View File

@ -0,0 +1,45 @@
# 前端架构重构计划:状态管理与工作流控制权移交
## 1. 背景与现状
当前的 `fundamental-analysis` 前端项目源自一个 POC (Proof of Concept) 原型。在快速迭代过程中,遗留了大量“为了跑通流程而写”的临时逻辑。核心问题在于**前端承担了过多的业务控制逻辑**,导致前后端状态不一致、错误处理困难、用户体验割裂。
### 核心痛点
1. **“自嗨式”状态流转**:前端自行判断何时从“数据获取”切换到“分析报告”阶段(基于轮询结果推断),而非响应后端的明确指令。
2. **脆弱的 Polling + SSE 混合模式**:前端先轮询 HTTP 接口查询进度,再断开轮询、改连 SSE 流。这两者之间存在状态断层,且严重依赖 HTTP 接口的实时性(而这个接口又是后端实时聚合下游得来的,极易超时)。
3. **缺乏统一的状态源 (Source of Truth)**:前端维护了一套复杂的 `ReportState`,后端也有一套状态,两者通过不稳定的网络请求同步,经常出现“前端显示完成,后端还在跑”或“后端报错,前端还在转圈”的情况。
## 2. 重构目标
**原则:前端归前端(UI 展示),后端归后端(业务逻辑与流转控制)。**
1. **控制权移交**:所有涉及业务流程流转(Phase Transition)的逻辑,必须由后端通过事件或状态字段明确驱动。前端只负责渲染当前状态。
2. **单一数据流 (Single Stream)**:废除“HTTP Polling -> SSE”的混合模式,建立统一的 WebSocket 或 SSE 通道。从发请求那一刻起,所有状态变更(包括数据获取进度、分析进度、报错)全由服务端推送。
3. **简化状态机**:前端 `useReportEngine` 钩子应简化为单纯的“状态订阅者”,不再包含复杂的判断逻辑(如 `if (tasks.every(t => t.success)) switchPhase()`)。
## 3. 实施方案 (Tasks)
### Phase 1: 后端基础设施准备 (Backend Readiness)
- [ ] **统一事件流接口**:在 `api-gateway` 实现一个统一的 SSE/WebSocket 端点(如 `/v2/workflow/events`)。
- 该端点应聚合:`DataFetchProgress` (NATS), `WorkflowStart` (NATS), `ModuleProgress` (ReportGenerator), `WorkflowComplete`
- [ ] **Gateway 状态缓存**:`api-gateway` 需要维护一个轻量级的 Request 状态缓存(Redis 或内存),不再实时透传查询请求给下游 Provider,而是直接返回缓存的最新状态。
- [ ] **定义统一状态协议**:制定前后端通用的状态枚举(`PENDING`, `DATA_FETCHING`, `ANALYZING`, `COMPLETED`, `FAILED`)。
### Phase 2: 前端逻辑剥离 (Frontend Refactoring)
- [ ] **废除 useReportEngine 里的推断逻辑**:删除所有 `useEffect` 里关于状态切换的 `if/else` 判断代码。
- [ ] **实现 Event-Driven Hook**:重写 `useReportEngine`,使其核心逻辑变为:连接流 -> 收到事件 -> 更新 State。
- 收到 `STATUS_CHANGED: DATA_FETCHING` -> 显示数据加载 UI。
- 收到 `STATUS_CHANGED: ANALYZING` -> 自动切换到分析 UI(无需前端判断数据是否齐备)。
- 收到 `ERROR` -> 显示错误 UI。
- [ ] **清理旧代码**:移除对 `/api/tasks` 轮询的依赖代码。
### Phase 3: 验证与兜底
- [ ] **断线重连机制**:实现 SSE/WS 的自动重连,并能从后端获取“当前快照”来恢复状态,防止刷新页面丢失进度。
- [ ] **超时兜底**:仅保留最基本的网络超时提示(如“服务器连接中断”),不再处理业务逻辑超时。
## 4. 复杂度评估与建议
- **复杂度**:中等偏高 (Medium-High)。涉及前后端协议变更和核心 Hook 重写。
- **风险**:高。这是系统的心脏部位,重构期间可能会导致整个分析流程暂时不可用。
- **建议**:**单独开一个线程(Branch/Session)进行**。不要在当前修复 Bug 的线程中混合进行。这需要系统性的设计和逐步替换,无法通过简单的 Patch 完成。
---
*Created: 2025-11-20*

View File

@ -0,0 +1,59 @@
# 系统日志分析与调试报告 (2025-11-20)
## 1. 系统现状快照
基于 `scripts/inspect_logs.sh` 的执行结果,当前系统各服务状态如下:
| 服务名称 | 状态 | 关键日志/行为 |
| :--- | :--- | :--- |
| **API Gateway** | 🟢 Running | 成功接收数据获取请求 (`FetchCompanyDataCommand`);成功注册服务;**未观测到**发送 `GenerateReportCommand`。 |
| **Data Persistence** | 🟢 Running | 数据库连接正常;成功写入 `session_data` (Source: `yfinance`, `tushare`)。 |
| **Report Generator** | 🟢 Running | 已启动并连接 NATS;**无**收到任务的日志;服务似乎在 13:43 重启过。 |
| **Alphavantage** | 🟢 Running | 任务执行成功 (Task Completed)。 |
| **YFinance** | 🟢 Running | 任务执行成功 (Cache HIT)。 |
| **Tushare** | 🟢 Running | 配置轮询正常;有数据写入记录。 |
| **Finnhub** | 🟡 Degraded | **配置错误**`No enabled Finnhub configuration found`,导致服务降级,无法执行任务。 |
| **NATS** | 🟢 Running | 正常运行。 |
## 2. 现象分析
### 2.1 核心问题:报告生成流程中断
用户反馈 "点击后无反应/报错",日志显示:
1. **数据获取阶段 (Data Fetch)**
* API Gateway 接收到了数据获取请求 (Req ID: `935e6999...`)。
* Alphavantage, YFinance, Tushare 成功响应并写入数据。
* **Finnhub 失败/超时**:由于配置缺失,Finnhub Provider 处于降级状态,无法处理请求。
* API Gateway 的 Aggregator 显示 `Received 2/4 responses`。它可能在等待所有 Provider 返回,导致整体任务状态卡在 "InProgress"。
2. **报告生成阶段 (Report Generation)**
* **完全未触发**。`api-gateway` 日志中没有 `Publishing analysis generation command`
* `report-generator-service` 日志中没有 `Received NATS command`
### 2.2 根因推断
前端 (Frontend) 或 API Gateway 的聚合逻辑可能存在**"全有或全无" (All-or-Nothing)** 的依赖:
* 前端通常轮询 `/tasks/{id}`
* 如果 Finnhub 任务从未完成(挂起或失败未上报),聚合状态可能永远不是 "Completed"。
* 前端因此卡在进度条,从未发送 `POST /analysis-requests/{symbol}` 来触发下一步的报告生成。
## 3. 潜在风险与待办
1. **Finnhub 配置缺失**:导致服务不可用,拖累整体流程。
2. **容错性不足**:单个 Provider (Finnhub) 的失败似乎阻塞了整个 Pipeline。我们需要确保 "部分成功" 也能继续后续流程。
3. **Report Generator 重启**:日志显示该服务在 13:43 重启。如果此前有请求,可能因 Crash 丢失。需要关注其稳定性。
## 4. 下一步调试与修复计划
### Phase 1: 修复阻塞点
- [ ] **修复 Finnhub 配置**:检查数据库中的 `data_sources_config`,确保 Finnhub 有效启用且 API Key 正确。
- [ ] **验证容错逻辑**:检查 API Gateway 的 `Aggregator` 和 Frontend 的 `useReportEngine`,确保设置超时机制。如果 3/4 成功1/4 超时,应允许用户继续生成报告。
### Phase 2: 验证报告生成器
- [ ] **手动触发**:使用 Postman 或 `curl` 直接调用 `POST http://localhost:4000/v1/analysis-requests/{symbol}`,绕过前端等待逻辑,验证 Report Generator 是否能正常工作。
- [ ] **观察日志**:确认 Report Generator 收到指令并开始流式输出。
### Phase 3: 增强可观测性
- [ ] **完善日志**Report Generator 的日志偏少,建议增加 "Start processing module X" 等详细步骤日志。
---
*Report generated by AI Assistant.*

View File

@ -0,0 +1,90 @@
# UI Improvement: Parallel Data Provider Status & Error Reporting
## 1. Problem Statement
Currently, the Fundamental Analysis page shows a generic "Fetching Data..." loading state. The detailed status and errors from individual data providers (Tushare, YFinance, AlphaVantage) are aggregated into a single status in the API Gateway.
This causes two issues:
1. **Ambiguity**: Users cannot see which provider is working, finished, or failed.
2. **Hidden Errors**: If one provider fails (e.g., database error) but the overall task is still "in progress" (or generic failed), the specific error details are lost or not displayed prominently.
## 2. Goal
Update the API and UI to reflect the parallel nature of data fetching. The UI should display a "control panel" style view where each Data Provider has its own status card, showing:
- Provider Name (e.g., "Tushare")
- Current Status (Queued, In Progress, Completed, Failed)
- Progress Details (e.g., "Fetching data...", "Persisting...", "Error: 500 Internal Server Error")
## 3. Proposed Changes
### 3.1 Backend (API Gateway)
**Endpoint**: `GET /v1/tasks/{request_id}`
**Current Behavior**: Returns a single `TaskProgress` object (the first one found).
**New Behavior**: Returns a list of all tasks associated with the `request_id`.
**Response Schema Change**:
```json
// BEFORE
{
"request_id": "uuid",
"task_name": "tushare:600519.SS",
"status": "in_progress",
...
}
// AFTER
[
{
"request_id": "uuid",
"task_name": "tushare:600519.SS",
"status": "failed",
"details": "Error: 500 ...",
...
},
{
"request_id": "uuid",
"task_name": "yfinance:600519.SS",
"status": "completed",
...
}
]
```
### 3.2 Frontend
#### Types
Update `TaskProgress` handling to support array responses.
#### Logic (`useReportEngine` & `useTaskProgress`)
- **Aggregation Logic**:
- The overall "Phase Status" (Fetching vs Complete) depends on *all* provider tasks.
- **Fetching**: If *any* task is `queued` or `in_progress`.
- **Complete**: When *all* tasks are `completed` or `failed`.
- **Error Handling**: Do not fail the whole report if one provider fails. Allow partial success.
#### UI (`RawDataViewer` & `FinancialTable`)
Replace the single loader with a grid layout:
```tsx
// Conceptual Layout
<div className="grid grid-cols-3 gap-4">
<ProviderStatusCard name="Tushare" task={tushareTask} />
<ProviderStatusCard name="YFinance" task={yfinanceTask} />
<ProviderStatusCard name="AlphaVantage" task={avTask} />
</div>
```
**Card States**:
- **Waiting**: Gray / Spinner
- **Success**: Green Checkmark + "Data retrieved"
- **Error**: Red X + Error Message (expanded or tooltip)
## 4. Implementation Steps
1. **Backend**: Modify `services/api-gateway/src/api.rs` to return `Vec<TaskProgress>`.
2. **Frontend**:
- Update `TaskProgress` type definition.
- Update `useTaskProgress` fetcher.
- Update `useReportEngine` polling logic to handle array.
- Create `ProviderStatusCard` component.
- Update `RawDataViewer` to render the grid.

View File

@ -0,0 +1,99 @@
# 系统生命周期与异常处理分析 (System Lifecycle Analysis)
## 1. 核心问题 (Core Issue)
目前系统的业务逻辑缺乏**确定性 (Determinism)** 和 **闭环 (Closed-loop Lifecycle)**
虽然各个微服务独立运行,但缺乏统一的状态协调机制。当“快乐路径” (Happy Path) 被打断(如 DB 报错)时,下游服务无法感知上游的失败,导致系统处于“僵尸状态” (Zombie State)。
> **用户反馈**:“有始必有终...你接了这个任务你就要负责把它结束掉...我们既然是微服务,那这个有始有终,可以说是跟生命性一样重要的一个基本原则。”
## 2. 现状分析 (Current State Analysis)
### 2.1 当前的数据流与控制流
```mermaid
sequenceDiagram
User->>API Gateway: 1. POST /data-requests
API Gateway->>NATS: 2. Pub "data_fetch_commands"
par Provider Execution
NATS->>Provider: 3. Receive Command
Provider->>Provider: 4. Fetch External Data
Provider-->>DB: 5. Persist Data (Upsert)
end
rect rgb(20, 0, 0)
Note right of DB: [CRITICAL FAILURE POINT]
DB-->>Provider: 500 Error (Panic)
end
alt Happy Path
Provider->>NATS: 6. Pub "events.financials_persisted"
NATS->>Report Gen: 7. Trigger Analysis
else Failure Path (Current)
Provider->>Log: Log Error
Provider->>TaskStore: Update Task = Failed
Note right of Provider: 链条在此断裂 (Chain Breaks Here)
end
User->>API Gateway: 8. Poll Task Status
API Gateway-->>User: All Failed
User->>User: 9. Frontend Logic: "All Done" -> Switch to Analysis UI
User->>API Gateway: 10. Connect SSE (Analysis Stream)
Note right of User: Hangs forever (Waiting for Report Gen that never started)
```
### 2.2 存在的具体缺陷
1. **隐式依赖链 (Implicit Dependency Chain)**:
* Report Generator 被动等待 `FinancialsPersistedEvent`。如果 Provider 挂了,事件永远不会发出,Report Generator 就像一个不知道此时该上班的工人,一直在睡觉。
2. **缺乏全局协调者 (Lack of Orchestration)**:
* API Gateway 把命令发出去就不管了(除了被动提供查询)。
* 没有人负责说:“嘿,数据获取全部失败了,取消本次分析任务。”
3. **前端的状态误判**:
* 前端认为 `Failed` 也是一种 `Completed`(终止态),这是对的。但前端错误地假设“只要终止了就可以进行下一步”。
* **修正原则**:只有 `Success` 才能驱动下一步。`Failed` 应该导致整个工作流的**熔断 (Circuit Break)**。
## 3. 改进方案 (Improvement Plan)
我们需要引入**Rustic**的确定性原则:**如果不能保证成功,就明确地失败。**
### 3.1 方案一:引入显式的工作流状态 (Explicit Workflow State) - 推荐
我们不需要引入沉重的 Workflow Engine (如 Temporal),但在逻辑上必须闭环。
**后端改进:**
1. **修复数据库错误**:这是首要任务。`unexpected null` 必须被修复。
2. **事件驱动的失败传播 (Failure Propagation)**
* 如果 Provider 失败,发送 `events.data_fetch_failed`
* Report Generator 或者 API Gateway 监听这个失败事件?
* **更好方案**Report Generator 不需要监听失败。API Gateway 需要聚合状态。
**前端/交互改进:**
1. **熔断机制**
* 在 `useReportEngine` 中,如果所有 Task 都是 `Failed`**绝对不要**进入 Analysis 阶段。
* 直接在界面显示:“数据获取失败,无法生成最新报告。是否查看历史数据?”
### 3.2 具体的实施步骤 (Action Items)
#### Phase 1: 修复根本错误 (Fix the Root Cause)
* **Task**: 调试并修复 `data-persistence-service` 中的 `500 Internal Server Error`
* 原因推测:数据库 schema 中某列允许 NULL,但 Rust 代码中定义为非 Option 类型;或者反之。
* 错误日志:`unexpected null; try decoding as an Option`。
#### Phase 2: 完善生命周期逻辑 (Lifecycle Logic)
* **Task (Frontend)**: 修改 `useReportEngine`
* 逻辑变更:`if (allTasksFailed) { stop(); show_error(); }`
* 逻辑变更:`if (partialSuccess) { proceed_with_warning(); }`
* **Task (Backend - ReportGen)**: 增加超时机制。
* 如果用户连接了 SSE 但长时间没有数据(因为没收到事件),应该发送一个 Timeout 消息给前端,结束连接,而不是无限挂起。
## 4. 结论
目前的“卡在 Analyzing”是因为**上游失败导致下游触发器丢失**,叠加**前端盲目推进流程**导致的。
我们必须:
1. 修好 DB 错误(让快乐路径通畅)。
2. 在前端增加“失败熔断”,不要在没有新数据的情况下假装去分析。
---
*Created: 2025-11-20*

View File

@ -0,0 +1,110 @@
# 系统日志分析与调试操作指南 (System Debugging Guide)
本文档旨在记录当前系统的运行状况、已知问题以及标准化的调试流程。它将指导开发人员如何利用现有工具(如 Docker、Tilt、自定义脚本)快速定位问题。
## 1. 系统现状 (System Status Snapshot)
截至 2025-11-20,Fundamental Analysis 系统由多个微服务组成,采用 Docker Compose 编排,并通过 Tilt 进行开发环境的热重载管理。
### 1.1 服务概览
| 服务名称 | 职责 | 当前状态 | 关键依赖 |
| :--- | :--- | :--- | :--- |
| **API Gateway** | 流量入口,任务分发,服务发现 | 🟢 正常 | NATS, Providers |
| **Report Generator** | 接收指令,调用 LLM 生成报告 | 🟢 正常 (但在等待任务) | NATS, Data Persistence, LLM API |
| **Data Persistence** | 数据库读写配置管理Session 数据隔离 | 🟢 正常 (已恢复 Seeding) | Postgres |
| **Alphavantage** | 美股数据 Provider | 🟢 正常 | NATS, External API |
| **YFinance** | 雅虎财经 Provider | 🟢 正常 | NATS, External API |
| **Tushare** | A股数据 Provider | 🟢 正常 | NATS, External API |
| **Finnhub** | 市场数据 Provider | 🟡 **降级 (Degraded)** | 缺少 API Key 配置 |
### 1.2 核心问题:报告生成流程阻塞
目前用户在前端点击 "生成报告" 后无反应。
* **现象**:API Gateway 未收到生成报告的请求,Report Generator 未收到 NATS 消息。
* **原因推断**:Finnhub Provider 因配置缺失处于 "Degraded" 状态,导致前端轮询的任务列表 (`GET /tasks/{id}`) 中始终包含未完成/失败的任务。前端逻辑可能因等待所有 Provider 完成而阻塞了后续 "Generate Report" 请求的发送。
---
## 2. 运维与开发流程 (DevOps & Workflow)
我们使用 **Tilt** 管理 Docker Compose 环境。这意味着你不需要手动 `docker-compose up/down` 来应用代码变更。
### 2.1 启动与更新
1. **启动环境**
在项目根目录运行:
```bash
tilt up
```
这会启动所有服务,并打开 Tilt UI (通常在 `http://localhost:10350`)。
2. **代码更新**
* 直接在 IDE 中修改代码并保存。
* **Tilt 会自动检测变更**
* 如果是前端代码Tilt 会触发前端热更新。
* 如果是 Rust 服务代码Tilt 会在容器内或宿主机触发增量编译并重启服务。
* **操作建议**:修改代码后,只需**等待一会儿**,观察 Tilt UI 变绿即可。无需手动重启容器。
3. **配置变更**
* 如果修改了 `docker-compose.yml``.env`Tilt 通常也会检测到并重建相关资源。
### 2.2 快速重置数据库 (如有必要)
如果遇到严重的数据不一致或认证问题,可使用以下命令重置数据库(**警告:数据将丢失,但会自动 Seed 默认模板**
```bash
docker-compose down postgres-db
docker volume rm fundamental_analysis_pgdata
docker-compose up -d postgres-db
# 等待几秒后
# Tilt 会自动重启依赖 DB 的服务,触发 Seeding
```
---
## 3. 调试与分析工具 (Debugging Tools)
为了快速诊断跨服务的问题,我们提供了一个能够聚合查看所有容器最新日志的脚本。
### 3.1 `inspect_logs.sh` 使用指南
该脚本位于 `scripts/inspect_logs.sh`。它能一次性输出所有关键服务的最后 N 行日志,避免手动切换容器查看。
* **基本用法** (默认显示最后 10 行)
```bash
./scripts/inspect_logs.sh
```
* **指定行数** (例如查看最后 50 行)
```bash
./scripts/inspect_logs.sh 50
```
### 3.2 分析策略
当遇到 "点击无反应" 或 "流程卡住" 时,请按以下步骤操作:
1. **运行脚本**`./scripts/inspect_logs.sh 20`
2. **检查 API Gateway**
* 是否有 `Received data fetch request` -> 如果无,说明前端没发请求。
* 是否有 `Publishing analysis generation command` -> 如果无,说明 Gateway 没收到生成指令,或者内部逻辑(如等待 Provider)卡住了。
3. **检查 Provider**
* 是否有 `Degraded``Error` 日志?(如当前的 Finnhub 问题)
4. **检查 Report Generator**
* 是否有 `Received NATS command` -> 如果无,说明消息没发过来。
---
## 4. 当前待办与修复建议 (Action Items)
为了打通流程,我们需要解决 Finnhub 导致的阻塞问题。
1. **修复配置**
* 在 `config/data_sources.yaml` (或数据库 `configs` 表) 中配置有效的 Finnhub API Key。
* 或者,暂时在配置中**禁用** Finnhub (`enabled: false`),让前端忽略该 Provider。
2. **前端容错**
* 检查前端 `useReportEngine.ts`
* 确保即使某个 Provider 失败/超时,用户依然可以强制触发 "Generate Report"。
3. **验证**
* 使用 `inspect_logs.sh` 确认 Finnhub 不再报错,或已被跳过。
* 确认 API Gateway 日志中出现 `Publishing analysis generation command`

View File

@ -0,0 +1,144 @@
# 测试策略设计文档:基于 Docker 环境的组件测试与 Orchestrator 逻辑验证
> **文档使用说明**:
> 本文档不仅作为测试设计方案,也是测试实施过程中的**Living Document (活文档)**。
> 请参阅第 4 节 "执行状态追踪 (Execution Status Tracking)" 了解当前进度、Milestones 和 Pending Tasks。
> 在每次完成重要步骤后,请更新此文档的状态部分。
## 1. 策略概述 (Strategy Overview)
响应“无 Mock、全真实环境”的要求,结合“Rustic 强类型”设计原则,我们将采用 **混合测试策略 (Hybrid Strategy)**:
1. **I/O 密集型服务 (Providers & ReportGen)**: 采用 **基于 Docker Compose 的组件集成测试**
* 直接连接真实的 Postgres, NATS 和第三方 API (Alphavantage/LLM)。
* 验证“端到端”的功能可用性(Key 是否有效、数据格式是否兼容)。
2. **逻辑密集型服务 (Orchestrator)**: 采用 **基于 Trait 的内存测试 (In-Memory Testing)**
* 通过 Trait 抽象外部依赖,使用简单的内存实现 (Fake) 替代真实服务。
* 实现毫秒级反馈,覆盖复杂的状态机跳转和边界条件。
---
## 2. 实施阶段 (Implementation Phases)
### Phase 1: 测试基础设施 (Infrastructure)
* **Docker Environment**: `docker-compose.test.yml`
* `postgres-test`: 端口 `5433:5432`
* `nats-test`: 端口 `4223:4222`
* `persistence-test`: 端口 `3001:3000` (Data Persistence Service 本身也视作基础设施的一部分)
* **Abstraction (Refactoring)**:
* 在 `workflow-orchestrator-service` 中定义 `WorkflowRepository``CommandPublisher` traits用于解耦逻辑测试。
### Phase 2: 微服务组件测试 (IO-Heavy Services)
**执行方式**: 宿主机运行 `cargo test`,环境变量指向 Phase 1 启动的 Docker 端口。
#### 1. Data Providers (数据源)
验证从 API 获取数据并存入系统的能力。
* **Alphavantage Provider**: (Key: `alphaventage_key`)
* Input: `FetchCompanyDataCommand`
* Assert: DB 中存入 SessionData (Profile/Financials)NATS 发出 `FinancialsPersistedEvent`
* **Tushare Provider**: (Key: `tushare_key`)
* Input: `FetchCompanyDataCommand` (CN Market)
* Assert: 同上。
* **Finnhub Provider**: (Key: `finnhub_key`)
* Input: `FetchCompanyDataCommand`
* Assert: 同上。
* **YFinance Provider**: (No Key)
* Input: `FetchCompanyDataCommand`
* Assert: 同上。
#### 2. Report Generator (报告生成器)
验证从 Persistence 读取数据并调用 LLM 生成报告的能力。
* **Key**: `openrouter_key` (Model: `google/gemini-flash-1.5` 或其他低成本模型)
* **Pre-condition**: 需要先往 Persistence (localhost:3001) 插入一些伪造的 SessionData (Financials/Price),否则 LLM 上下文为空。
* **Input**: `GenerateReportCommand`
* **Logic**:
1. Service 从 Persistence 读取数据。
2. Service 组装 Prompt 调用 OpenRouter API。
3. Service 将生成的 Markdown 存回 Persistence。
* **Assert**:
* NATS 收到 `ReportGeneratedEvent`
* Persistence 中能查到 `analysis_report` 类型的 SessionData且内容非空。
### Phase 3: Orchestrator 逻辑测试 (Logic-Heavy)
**执行方式**: 纯内存单元测试,无需 Docker。
* **Refactoring**: 将 Orchestrator 的核心逻辑 `WorkflowEngine` 修改为接受 `Box<dyn WorkflowRepository>``Box<dyn CommandPublisher>`
* **Test Suite**:
* **DAG Construction**: 给定不同 Template ID,验证生成的 DAG 结构(依赖关系)是否正确。
* **State Transition**:
* Scenario 1: Happy Path (所有 Task 成功 -> Workflow 完成)。
* Scenario 2: Dependency Failure (上游失败 -> 下游 Skipped)。
* Scenario 3: Resume (模拟服务重启,从 Repository 加载状态并继续)。
* **Policy Check**: 验证 "At least one provider" 策略是否生效。
### Phase 4: 全链路验收测试 (E2E)
**执行方式**: `scripts/run_e2e.sh` (Docker + Rust Test Runner)
* **配置策略**:
* 动态注入测试配置 (`setup_test_environment`):
* 注册 `simple_test_analysis` 模板。
* 配置 LLM Provider (`openrouter`/`new_api`) 使用 `google/gemini-2.5-flash-lite`
* **超时控制**:
* SSE 连接监听设置 60 秒硬性超时,防止长连接假死。
* **Scenarios**:
* **Scenario A (Happy Path)**: 使用 `simple_test_analysis` 模板完整运行。
* **Scenario B (Recovery)**: 模拟 Orchestrator 重启,验证状态恢复。 (SKIPPED: Requires DB Persistence)
* **Scenario C (Partial Failure)**: 模拟非关键 Provider (Tushare) 故障,验证工作流不受影响。
* **Scenario D (Invalid Input)**: 使用无效 Symbol验证错误传播和快速失败。
* **Scenario E (Module Failure)**: 模拟 Analysis 模块内部错误(如配置错误),验证工作流终止。
* **Status**: ✅ Completed (2025-11-21)
---
## 3. 执行计划 (Action Plan)
1. **Environment**: 创建 `docker-compose.test.yml` 和控制脚本。 ✅
2. **Providers Test**: 编写 4 个 Data Provider 的集成测试。 ✅
3. **ReportGen Test**: 编写 Report Generator 的集成测试(含数据预埋逻辑)。 ✅
4. **Orchestrator Refactor**: 引入 Traits 并编写内存测试。 ✅
5. **Final Verification**: 运行全套测试。 ✅
---
## 4. 执行状态追踪 (Execution Status Tracking)
### 当前状态 (Current Status)
* **日期**: 2025-11-21
* **阶段**: Phase 4 - E2E Testing Completed
* **最近活动**:
* 修复了测试模板配置错误导致 Scenario A 超时的问题。
* 修复了 Orchestrator 错误广播 Analysis 失败导致 Scenario C 误判的问题。
* 完整验证了 Scenario A, C, D, E。
* 暂时跳过 Scenario B (待持久化层就绪后启用)。
### 历史记录 (Milestones)
| 日期 | 阶段 | 事件/变更 | 状态 |
| :--- | :--- | :--- | :--- |
| 2025-11-20 | Planning | 完成测试策略文档编写,确定混合测试方案。 | ✅ Completed |
| 2025-11-20 | Phase 1 | 创建 `docker-compose.test.yml` 和基础设施。 | ✅ Completed |
| 2025-11-20 | Phase 2 | 完成 Data Providers 集成测试代码。 | ✅ Completed |
| 2025-11-20 | Phase 2 | 完成 Report Generator 集成测试代码。 | ✅ Completed |
| 2025-11-20 | Phase 3 | 完成 Orchestrator 重构与内存测试。 | ✅ Completed |
| 2025-11-21 | Phase 4 | 修复 SSE 超时问题,增加动态配置注入。 | ✅ Completed |
| 2025-11-21 | Phase 4 | 实现并验证异常场景 (Partial Failure, Invalid Input, Module Error)。 | ✅ Completed |
### 待处理项 (Next Steps)
- [ ] **Persistence**: 为 Orchestrator 引入 Postgres 存储,启用 Scenario B。
- [ ] **CI Integration**: 将 `run_e2e.sh` 集成到 CI 流水线。
## 5. 未来展望 (Future Outlook)
随着系统演进,建议增加以下测试场景:
1. **Network Resilience (网络分区)**:
* 使用 `toxiproxy` 或 Docker Network 操作模拟网络中断。
* 验证服务的重试机制 (Retry Policy) 和幂等性。
2. **Concurrency & Load (并发与负载)**:
* 同时启动 10+ 个工作流,验证 Orchestrator 调度和 Provider 吞吐量。
* 验证 Rate Limiting 是否生效(避免被上游 API 封禁)。
3. **Long-Running Workflows (长流程)**:
* 测试包含数十个步骤、运行时间超过 5 分钟的复杂模板。
* 验证 SSE 连接保活和超时处理。
4. **Data Integrity (数据一致性)**:
* 验证 Fetch -> Persistence -> Report Gen 链路中的数据精度(小数位、时区)。

View File

@ -0,0 +1,96 @@
# Phase 4: End-to-End (E2E) 测试计划与执行方案
## 1. 测试目标
本次 E2E 测试旨在验证系统在“全链路真实环境”下的行为,涵盖**正常流程**、**异常恢复**及**组件动态插拔**场景。不涉及前端 UI,而是通过模拟 HTTP/SSE 客户端直接与后端交互。
核心验证点:
1. **业务闭环**: 从 `POST /start` 到 SSE 接收 `WorkflowCompleted` 再到最终报告生成。
2. **状态一致性**: Orchestrator 重启后,能否通过 `SyncStateCommand` 恢复上下文并继续执行。
3. **容错机制**: 当部分 Data Provider 下线时,策略引擎是否按预期工作(如 "At least one provider")。
4. **并发稳定性**: 多个 Workflow 同时运行时互不干扰。
## 2. 测试环境架构
测试运行器 (`end-to-end` Rust Crate) 将作为外部观察者和控制器。
```mermaid
graph TD
TestRunner[Rust E2E Runner] -->|HTTP/SSE| Gateway[API Gateway]
TestRunner -->|Docker API| Docker[Docker Engine]
subgraph "Docker Compose Stack"
Gateway --> Orchestrator
Orchestrator --> NATS
NATS --> Providers
NATS --> ReportGen
Providers --> Postgres
end
Docker -.->|Stop/Start| Orchestrator
Docker -.->|Stop/Start| Providers
```
## 3. 详细测试场景 (Scenarios)
### Scenario A: The Happy Path (基准测试)
* **目标**: 验证标准流程无误。
* **步骤**:
1. 发送 `POST /api/v2/workflow/start` (Symbol: AAPL/000001.SZ)。
2. 建立 SSE 连接监听 `events.workflow.{id}`
3. 验证接收到的事件序列:
* `WorkflowStarted` (含完整 DAG)
* `TaskStateChanged` (Pending -> Running -> Completed)
* `TaskStreamUpdate` (Report 内容流式传输)
* `WorkflowCompleted`
4. **断言**: 最终报告内容非空,数据库中存在 Analysis 记录。
### Scenario B: Brain Transplant (Orchestrator 宕机恢复)
* **目标**: 验证 Orchestrator 的状态持久化与快照恢复能力。
* **步骤**:
1. 启动 Workflow。
2. 等待至少一个 Data Fetch Task 完成 (Receiving `TaskCompleted`)。
3. **Action**: `docker stop workflow-orchestrator-service`
4. 等待 5 秒,**Action**: `docker start workflow-orchestrator-service`
5. Test Runner 重新建立 SSE 连接 (自动触发 `SyncStateCommand`)。
6. **断言**:
* 收到 `WorkflowStateSnapshot` 事件。
* 快照中已完成的任务状态保持 `Completed`
* 流程继续向下执行,直到最终完成。
### Scenario C: Partial Failure (组件拔插)
* **目标**: 验证 "At least one provider" 容错策略。
* **步骤**:
1. **Action**: `docker stop tushare-provider-service` (模拟 Tushare 挂掉)。
2. 启动 Workflow (Symbol: 000001.SZ,需涉及 Tushare)。
3. **断言**:
* Tushare 对应的 Task 状态变为 `Failed``Skipped`
* 由于还有其他 Provider(或模拟数据),Orchestrator 判定满足 "At least one" 策略。
* 下游 Analysis Task **正常启动** (而不是被 Block)。
* 流程最终显示 `WorkflowCompleted` (可能带有 Warning)。
4. **Cleanup**: `docker start tushare-provider-service`
### Scenario D: Network Jitter (网络中断模拟)
* **目标**: 验证 Gateway 到 Orchestrator 通讯中断后的恢复。
* **步骤**:
1. 启动 Workflow。
2. Test Runner 主动断开 SSE 连接。
3. 等待 10 秒。
4. Test Runner 重连 SSE。
5. **断言**: 立即收到 `WorkflowStateSnapshot`,且补齐了断连期间产生的状态变更。
## 4. 工程实现 (Rustic Implementation)
新建独立 Rust Crate `tests/end-to-end`,不依赖现有 workspace 的构建配置,独立编译运行。
**依赖栈**:
* `reqwest`: HTTP Client
* `eventsource-stream` + `futures`: SSE Handling
* `bollard`: Docker Control API
* `tokio`: Async Runtime
* `anyhow`: Error Handling
* `serde`: JSON Parsing
**执行方式**:
```bash
# 在 tests/end-to-end 目录下
cargo run -- --target-env test
```

View File

@ -0,0 +1,62 @@
# Backend Requirements for Frontend Refactor
由于前端正在进行“破坏式”重构,删除了所有包含业务逻辑控制、状态推断、流程编排的代码(如 `useReportEngine`, `ExecutionStepManager`),后端必须接管以下职责,以支持纯被动式(Puppet Mode)的前端。
## 1. 核心原则
前端不再拥有“大脑”,只拥有“眼睛”和“耳朵”。所有状态变更、流程流转、错误判断全由后端指令驱动。
## 2. 接口需求
### 2.1 统一事件流 (Unified Event Stream)
前端将只连接**一个**长连接通道(SSE 或 WebSocket),用于接收整个分析周期的所有信息。
* **Endpoint**: `/api/v2/workflow/events?request_id={id}` (建议)
* **职责**: 聚合 NATS (Data Fetching), Internal State (Report Generator), Database (Persistence) 的所有事件。
### 2.2 事件类型定义 (Protocol)
后端需要推送以下类型的事件,且 Payload 必须包含前端渲染所需的所有上下文,前端不再发起二次请求查询详情。
1. **`WORKFLOW_START`**
* 标志流程开始。
* Payload: `{ symbol, market, template_id, timestamp }`
2. **`PHASE_CHANGED`**
* **关键**: 前端不再判断何时切换界面,完全依赖此事件。
* Payload: `{ phase: 'DATA_FETCHING' | 'ANALYZING' | 'COMPLETED' | 'FAILED', previous_phase: '...' }`
3. **`TASK_PROGRESS` (Data Fetching Phase)**
* 替代前端轮询 `/api/tasks`
* Payload: `{ task_id, provider, status, progress, message }`
* **注意**: 后端需负责聚合多个 Provider 的进度,前端只管展示列表。
4. **`MODULE_PROGRESS` (Analysis Phase)**
* 替代旧的 SSE 流。
* Payload: `{ module_id, content_delta, status }`
5. **`WORKFLOW_ERROR`**
* **关键**: 包含错误级别(Fatal/Warning)。前端只展示,不判断是否重试。
* Payload: `{ code, message, is_fatal, suggestion }`
## 3. 逻辑接管需求
### 3.1 状态机迁移 (State Transitions)
* **旧逻辑 (已删)**: 前端轮询任务 -> `if (all_tasks_done) start_analysis()`.
* **新逻辑**: 后端 `Workflow Orchestrator` 监听任务完成事件 -> 自动触发分析 -> 推送 `PHASE_CHANGED: ANALYZING` 给前端。
### 3.2 容错与部分成功 (Partial Success)
* **旧逻辑 (已删)**: 前端判断 `if (failed_tasks < total) continue`.
* **新逻辑**: 后端决定数据缺失量是否允许继续分析。如果允许,直接进入分析阶段;如果不允许,推送 `WORKFLOW_ERROR`
### 3.3 超时控制 (Timeout)
* **旧逻辑 (已删)**: 前端 `setTimeout(10min)`.
* **新逻辑**: 后端设置执行超时。如果超时,主动推送 Error 事件关闭连接。前端仅处理网络层面的断开重连。
### 3.4 断点恢复 (Resume)
* **需求**: 当用户刷新页面重连 SSE 时,后端必须立即推送一条 `SNAPSHOT` 事件,包含当前所有已完成的任务、已生成的报告片段、当前所处的阶段。
* **目的**: 防止前端因为丢失历史事件而无法渲染完整界面。
## 4. 废弃接口
以下接口的前端调用代码已被删除,后端可酌情保留用于调试,但业务不再依赖:
* `GET /api/tasks/{id}` (轮询接口)
* `GET /api/analysis-results/stream` (旧的纯分析流,需升级为统一流)

View File

@ -0,0 +1,132 @@
# NATS Subject 强类型重构设计文档
## 1. 背景与现状 (Background & Status Quo)
目前,项目中微服务之间的 NATS 消息通信主要依赖于硬编码的字符串(String Literals)来指定 Subject(主题)。例如:
- `services/report-generator-service` 使用 `"events.analysis.report_generated"` 发布消息。
- `services/workflow-orchestrator-service` 使用 `"events.analysis.>"` 订阅消息,并使用字符串匹配 `if subject == "events.analysis.report_generated"` 来区分消息类型。
这种方式存在以下问题:
1. **弱类型约束**:字符串拼接容易出现拼写错误(Typos),且无法在编译期捕获,只能在运行时发现,违反了 "Fail Early" 原则。
2. **维护困难**:Subject 散落在各个服务的代码中,缺乏统一视图(Single Source of Truth),修改一个 Subject 需要全局搜索并小心替换。
3. **缺乏契约**:Subject 与 Payload(消息体)之间的对应关系仅通过注释或隐式约定存在,缺乏代码层面的强制约束。
## 2. 目的 (Objectives)
本设计旨在贯彻 Rustic 的工程原则(强类型约束、单一来源、早失败、无回退),通过以下方式重构 NATS Subject 的管理:
1. **强类型枚举 (Enum-driven Subjects)**:在 `common-contracts` 中定义全局唯一的枚举类型,涵盖系统中所有合法的 NATS Subject。
2. **消除魔法字符串**:禁止在业务逻辑中直接使用字符串字面量进行 publish 或 subscribe 操作。
3. **编译期安全**:利用 Rust 的类型系统,确保 Subject 的构造和匹配是合法的。
## 3. 设计方案 (Design Proposal)
### 3.1 核心数据结构 (`common-contracts`)
在 `services/common-contracts/src/subjects.rs` 中定义 `NatsSubject` 枚举。该枚举涵盖系统中所有合法的 NATS Subject。
```rust
use uuid::Uuid;
use std::fmt;
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NatsSubject {
// --- Commands ---
WorkflowCommandStart, // "workflow.commands.start"
WorkflowCommandSyncState, // "workflow.commands.sync_state"
DataFetchCommands, // "data_fetch_commands"
AnalysisCommandGenerateReport, // "analysis.commands.generate_report"
// --- Events ---
// Analysis Events
AnalysisReportGenerated, // "events.analysis.report_generated"
AnalysisReportFailed, // "events.analysis.report_failed"
// Data Events
DataFinancialsPersisted, // "events.data.financials_persisted"
DataFetchFailed, // "events.data.fetch_failed"
// Workflow Events (Dynamic)
WorkflowProgress(Uuid), // "events.workflow.{uuid}"
// --- Wildcards (For Subscription) ---
AnalysisEventsWildcard, // "events.analysis.>"
WorkflowCommandsWildcard, // "workflow.commands.>"
DataEventsWildcard, // "events.data.>"
}
// ... impl Display and FromStr ...
```
### 3.2 使用方式
#### 发布消息 (Publish)
```rust
// Old
state.nats.publish("events.analysis.report_generated", payload).await?;
// New
use common_contracts::subjects::NatsSubject;
state.nats.publish(NatsSubject::AnalysisReportGenerated.to_string(), payload).await?;
```
#### 订阅与匹配 (Subscribe & Match)
```rust
// Old
let sub = nats.subscribe("events.analysis.>").await?;
while let Some(msg) = sub.next().await {
if msg.subject == "events.analysis.report_generated" { ... }
}
// New
let sub = nats.subscribe(NatsSubject::AnalysisEventsWildcard.to_string()).await?;
while let Some(msg) = sub.next().await {
// 将接收到的 subject 字符串尝试转换为枚举
match NatsSubject::try_from(msg.subject.as_str()) {
Ok(NatsSubject::AnalysisReportGenerated) => {
// Handle report generated
},
Ok(NatsSubject::AnalysisReportFailed) => {
// Handle report failed
},
_ => {
// Log warning or ignore
}
}
}
```
## 4. 实施状态 (Implementation Status)
### 4.1 `common-contracts`
- [x] 定义 `NatsSubject` 枚举及相关 Trait (`Display`, `FromStr`) 在 `src/subjects.rs`
- [x] 添加单元测试确保 Round-trip 正确性。
### 4.2 `report-generator-service`
- [x] `src/worker.rs`: 替换 Publish Subject。
### 4.3 `workflow-orchestrator-service`
- [x] `src/message_consumer.rs`: 替换 Subscribe Subject 和 Match 逻辑。
### 4.4 `api-gateway`
- [x] `src/api.rs`: 替换 Publish Subject。
### 4.5 Provider Services
- [x] `finnhub-provider-service`: 替换 Subscribe Subject,移除魔法字符串常量。
- [x] `alphavantage-provider-service`: 替换 Subscribe Subject,移除魔法字符串常量。
- [x] `tushare-provider-service`: 替换 Subscribe Subject,移除魔法字符串常量。
- [x] `yfinance-provider-service`: 替换 Subscribe Subject,移除魔法字符串常量。
## 5. 进阶优化 (Future Work)
- [x] **关联 Payload 类型**: 利用 Rust 的 trait 系统,将 Subject 枚举与对应的 Payload 结构体关联起来,使得 `publish` 函数能够根据 Subject 自动推断 Payload 类型,从而防止 Subject 与 Payload 不匹配的问题。
```rust
trait SubjectMessage {
// type Payload: Serialize + DeserializeOwned; // Simplified: trait is implemented on Payload struct itself
fn subject(&self) -> NatsSubject;
}
```
已在 `services/common-contracts/src/subjects.rs` 中实现 `SubjectMessage` trait,并在 `messages.rs` 中为各个 Command/Event 实现了该 trait。各服务已更新为使用 `msg.subject().to_string()` 进行发布。

View File

@ -35,8 +35,10 @@ export function AnalysisContent({
const contentWithoutTitle = removeTitleFromContent(state.content, analysisName); const contentWithoutTitle = removeTitleFromContent(state.content, analysisName);
const normalizedContent = normalizeMarkdown(contentWithoutTitle); const normalizedContent = normalizeMarkdown(contentWithoutTitle);
const isGenerating = state.loading;
return ( return (
<div className="space-y-4"> <div className="space-y-4 relative">
<h2 className="text-lg font-medium">{analysisName} {modelName || 'AI'}</h2> <h2 className="text-lg font-medium">{analysisName} {modelName || 'AI'}</h2>
{!financials && ( {!financials && (
@ -64,16 +66,16 @@ export function AnalysisContent({
: '待开始'} : '待开始'}
</div> </div>
</div> </div>
{/* 始终可见的"重新生成分析"按钮 */} {/* 重新生成按钮 */}
{!state.loading && ( {!state.loading && (
<Button <Button
variant="ghost" variant="ghost"
size="sm" size="sm"
onClick={() => retryAnalysis(analysisType)} onClick={() => retryAnalysis(analysisType)}
disabled={currentAnalysisTask !== null} disabled={currentAnalysisTask !== null || isGenerating}
> >
<RotateCw className="size-4" /> <RotateCw className={`size-4 ${isGenerating ? 'animate-spin' : ''}`} />
{isGenerating ? '生成中...' : '重新生成分析'}
</Button> </Button>
)} )}
</div> </div>
@ -82,7 +84,20 @@ export function AnalysisContent({
<p className="text-red-500">: {state.error}</p> <p className="text-red-500">: {state.error}</p>
)} )}
{(state.loading || state.content) && ( {/* Content Area with Overlay */}
<div className="relative min-h-[200px]">
{/* Overlay when generating */}
{isGenerating && (
<div className="absolute inset-0 bg-background/80 backdrop-blur-sm z-10 flex flex-col items-center justify-center space-y-4 rounded-lg border">
<Spinner className="size-8 text-primary" />
<p className="text-sm font-medium text-muted-foreground animate-pulse">
...
</p>
</div>
)}
{/* Existing Content or Placeholder */}
{state.content ? (
<div className="space-y-4"> <div className="space-y-4">
<div className="border rounded-lg p-6 bg-card"> <div className="border rounded-lg p-6 bg-card">
<article className="markdown-body" style={{ <article className="markdown-body" style={{
@ -97,16 +112,15 @@ export function AnalysisContent({
> >
{normalizedContent} {normalizedContent}
</ReactMarkdown> </ReactMarkdown>
{state.loading && (
<span className="inline-flex items-center gap-2 mt-2 text-muted-foreground">
<Spinner className="size-3" />
<span className="text-sm">...</span>
</span>
)}
</article> </article>
</div> </div>
</div> </div>
) : !isGenerating && (
<div className="flex items-center justify-center h-full text-muted-foreground border rounded-lg p-12 border-dashed">
</div>
)} )}
</div>
</> </>
)} )}
</div> </div>

View File

@ -0,0 +1,303 @@
import { useState, useRef, useEffect, useMemo } from 'react';
import { useDataRequest, useTaskProgress, useAnalysisResults } from '@/hooks/useApi';
// UI-facing state for a single analysis module's panel.
interface AnalysisState {
    content: string;      // markdown content produced by the backend for this module
    loading: boolean;     // true while the backend reports meta_data.status === 'processing'
    error: string | null; // error message when the module failed, otherwise null
    elapsed_ms?: number;  // NOTE(review): optional; not populated by the sync effect in this file — confirm who sets it
}
// One row of the "execution details" view: lifecycle of a single analysis module run.
interface AnalysisRecord {
    type: string; // module_id of the analysis module
    name: string; // display name resolved from the analysis config, falling back to module_id
    status: 'pending' | 'running' | 'done' | 'error';
    start_ts?: string;    // NOTE(review): not populated by the sync effect below — confirm still needed
    end_ts?: string;      // NOTE(review): not populated by the sync effect below — confirm still needed
    duration_ms?: number; // taken from meta_data.elapsed_ms when the backend provides it
    tokens?: {
        prompt_tokens: number;
        completion_tokens: number;
        total_tokens: number;
    };
    error?: string; // meta_data.error when status === 'error'
}
/**
 * Orchestrates backend-driven analysis runs for a single symbol.
 *
 * Responsibilities:
 * - Template selection: tracks the selected template-set id, synced from the
 *   loaded report (`financials.meta.template_id`) or defaulted from `templateSets`.
 * - Triggering: posts a data request via `useDataRequest` and remembers the
 *   returned request id so displayed results can be scoped to that batch.
 * - Result sync: polls `useAnalysisResults(unifiedSymbol)` and projects the rows
 *   belonging to the active request id into per-module `analysisStates`
 *   (content/loading/error) and `analysisRecords` (execution details).
 * - Timing/progress: a 1-second ticker while a run is active, plus derived
 *   completion percentage and total elapsed time.
 *
 * @param financials       Latest report payload; `meta.template_id` and `meta.elapsed_ms` are read.
 * @param financialConfig  Global analysis config used as a fallback (legacy behavior).
 * @param normalizedMarket Market code forwarded when triggering a run.
 * @param unifiedSymbol    Symbol used both for triggering and for result polling.
 * @param isLoading        Not referenced in this body; kept for caller compatibility.
 * @param error            Not referenced in this body; kept for caller compatibility.
 * @param templateSets     Map of template-set id -> template set definition.
 */
export function useAnalysisRunner(
    financials: any,
    financialConfig: any,
    normalizedMarket: string,
    unifiedSymbol: string,
    isLoading: boolean,
    error: any,
    templateSets: any // Added templateSets
) {
    // --- Template Logic ---
    const [selectedTemplateId, setSelectedTemplateId] = useState<string>('');
    const reportTemplateId = financials?.meta?.template_id;
    // Sync selected template with report template when report loads
    useEffect(() => {
        if (reportTemplateId) {
            setSelectedTemplateId(reportTemplateId);
        }
    }, [reportTemplateId]);
    // Set default template if nothing selected and no report template
    useEffect(() => {
        if (!selectedTemplateId && !reportTemplateId && templateSets && Object.keys(templateSets).length > 0) {
            // Prefer ids containing "standard" or exactly "default"; otherwise first key
            const defaultId = Object.keys(templateSets).find(k => k.includes('standard') || k === 'default') || Object.keys(templateSets)[0];
            setSelectedTemplateId(defaultId);
        }
    }, [templateSets, selectedTemplateId, reportTemplateId]);
    // Determine active template set
    const activeTemplateId = selectedTemplateId;
    const activeTemplateSet = useMemo(() => {
        if (!activeTemplateId || !templateSets) return null;
        return templateSets[activeTemplateId] || null;
    }, [activeTemplateId, templateSets]);
    // Derive effective analysis config from template set, falling back to global config if needed
    const activeAnalysisConfig = useMemo(() => {
        if (activeTemplateSet) {
            return {
                ...financialConfig,
                analysis_modules: activeTemplateSet.modules,
            };
        }
        return financialConfig; // Fallback to global config (legacy behavior)
    }, [activeTemplateSet, financialConfig]);
    // List of analysis module ids in the active config
    const analysisTypes = useMemo(() => {
        if (!activeAnalysisConfig?.analysis_modules) return [];
        return Object.keys(activeAnalysisConfig.analysis_modules);
    }, [activeAnalysisConfig]);
    // Analysis state management
    const [analysisStates, setAnalysisStates] = useState<Record<string, AnalysisState>>({});
    // NOTE(review): several of these refs (fullAnalysisTriggeredRef, analysisFetchedRefs,
    // abortControllerRef, currentAnalysisTypeRef, manualRunKey) are never read in this
    // body — likely leftovers from the pre-backend-driven implementation; confirm and prune.
    const fullAnalysisTriggeredRef = useRef<boolean>(false);
    const isAnalysisRunningRef = useRef<boolean>(false);
    const analysisFetchedRefs = useRef<Record<string, boolean>>({});
    const stopRequestedRef = useRef<boolean>(false);
    const abortControllerRef = useRef<AbortController | null>(null);
    const currentAnalysisTypeRef = useRef<string | null>(null);
    const [manualRunKey, setManualRunKey] = useState(0);
    // Currently executing analysis task
    const [currentAnalysisTask, setCurrentAnalysisTask] = useState<string | null>(null);
    // Timer state
    const [startTime, setStartTime] = useState<number | null>(null);
    const [elapsedSeconds, setElapsedSeconds] = useState(0);
    // Analysis execution records
    const [analysisRecords, setAnalysisRecords] = useState<AnalysisRecord[]>([]);
    // New architecture: trigger analysis and observe task progress
    const { trigger: triggerAnalysisRequest, isMutating: triggering } = useDataRequest();
    const [requestId, setRequestId] = useState<string | null>(null);
    const { progress: taskProgress } = useTaskProgress(requestId);
    // Poll persisted analysis results for this symbol
    const { data: newAnalysisResults } = useAnalysisResults(unifiedSymbol);
    // 1. Determine the Active Request ID (The one we want to display)
    const activeRequestId = useMemo(() => {
        // If the user manually triggered a task in this session, prioritize that
        if (requestId) return requestId;
        // Otherwise, default to the most recent result's request_id from the backend
        // Assuming newAnalysisResults is sorted by created_at DESC
        if (newAnalysisResults && newAnalysisResults.length > 0) {
            return newAnalysisResults[0].request_id;
        }
        return null;
    }, [requestId, newAnalysisResults]);
    // 2. Filter results for the current batch
    const currentBatchResults = useMemo(() => {
        if (!newAnalysisResults || !activeRequestId) return [];
        return newAnalysisResults.filter(r => r.request_id === activeRequestId);
    }, [newAnalysisResults, activeRequestId]);
    // 3. Sync analysisStates (Content) from current batch
    // We only update if we have a result for that module in the current batch.
    // If not, we leave it as is (or could clear it if we wanted strict mode).
    // For now, we'll update based on what we find.
    useEffect(() => {
        if (!currentBatchResults) return;
        setAnalysisStates(prev => {
            const next = { ...prev };
            let hasChanges = false;
            currentBatchResults.forEach(result => {
                const type = result.module_id;
                // Missing status is treated as a finished, successful result
                const status = result.meta_data?.status || 'success';
                const content = result.content;
                const currentState = next[type];
                // Only update if content changed or status changed
                if (
                    !currentState ||
                    currentState.content !== content ||
                    (status === 'processing' && !currentState.loading) ||
                    (status === 'success' && currentState.loading) ||
                    (status === 'error' && !currentState.error)
                ) {
                    next[type] = {
                        content: content,
                        loading: status === 'processing',
                        error: status === 'error' ? result.meta_data?.error || 'Unknown error' : null,
                    };
                    hasChanges = true;
                }
            });
            // Return the previous object when nothing changed to avoid a re-render
            return hasChanges ? next : prev;
        });
    }, [currentBatchResults]);
    // 4. Sync analysisRecords (Execution Details) from current batch
    // This ensures Execution Details only shows the relevant modules for the current run.
    useEffect(() => {
        if (!currentBatchResults) return;
        // If we are starting a new run (triggered), we might want to reset records initially?
        // But currentBatchResults will eventually populate.
        const records: AnalysisRecord[] = currentBatchResults.map(r => {
            const statusStr = r.meta_data?.status;
            let status: 'pending' | 'running' | 'done' | 'error' = 'done';
            if (statusStr === 'processing') status = 'running';
            else if (statusStr === 'error') status = 'error';
            return {
                type: r.module_id,
                name: activeAnalysisConfig?.analysis_modules?.[r.module_id]?.name || r.module_id,
                status: status,
                duration_ms: r.meta_data?.elapsed_ms, // Backend needs to provide this in meta_data
                error: r.meta_data?.error,
                tokens: r.meta_data?.tokens // Backend needs to provide this
            };
        });
        // Sort records to match the defined order in activeAnalysisConfig if possible
        const definedOrder = Object.keys(activeAnalysisConfig?.analysis_modules || {});
        records.sort((a, b) => {
            const idxA = definedOrder.indexOf(a.type);
            const idxB = definedOrder.indexOf(b.type);
            if (idxA === -1) return 1;
            if (idxB === -1) return -1;
            return idxA - idxB;
        });
        setAnalysisRecords(records);
    }, [currentBatchResults, activeAnalysisConfig]);
    // Completion percentage: done + errored over total known records
    const completionProgress = useMemo(() => {
        const totalTasks = analysisRecords.length;
        if (totalTasks === 0) return 0;
        const completedTasks = analysisRecords.filter(r => r.status === 'done' || r.status === 'error').length;
        return (completedTasks / totalTasks) * 100;
    }, [analysisRecords]);
    // Total elapsed time (ms): financial fetch time plus per-module durations
    const totalElapsedMs = useMemo(() => {
        const finMs = financials?.meta?.elapsed_ms || 0;
        const analysesMs = analysisRecords.reduce((sum, r) => sum + (r.duration_ms || 0), 0);
        return finMs + analysesMs;
    }, [financials?.meta?.elapsed_ms, analysisRecords]);
    const hasRunningTask = useMemo(() => {
        if (currentAnalysisTask !== null) return true;
        // Also check analysisRecords derived from backend
        if (analysisRecords.some(r => r.status === 'running')) return true;
        return false;
    }, [currentAnalysisTask, analysisRecords]);
    // Whether every known task has finished (done or errored)
    const allTasksCompleted = useMemo(() => {
        if (analysisRecords.length === 0) return false;
        const allDoneOrErrored = analysisRecords.every(r => r.status === 'done' || r.status === 'error');
        return allDoneOrErrored && !hasRunningTask && currentAnalysisTask === null;
    }, [analysisRecords, hasRunningTask, currentAnalysisTask]);
    // Stop the timer once everything has completed
    useEffect(() => {
        if (allTasksCompleted) {
            setStartTime(null);
        }
    }, [allTasksCompleted]);
    // 1-second ticker driving elapsedSeconds while a run is active
    useEffect(() => {
        if (!startTime) return;
        const interval = setInterval(() => {
            const now = Date.now();
            const elapsed = Math.floor((now - startTime) / 1000);
            setElapsedSeconds(elapsed);
        }, 1000);
        return () => clearInterval(interval);
    }, [startTime]);
    const retryAnalysis = async (analysisType: string) => {
        // Retry logic is complicated with the new backend-driven approach.
        // Ideally, we should send a backend command to retry a specific module.
        // For now, we can just re-trigger the whole template or alert the user.
        // Or implementation TODO: Single module retry endpoint.
        alert("单个模块重试功能在新架构中尚未就绪,请重新触发完整分析。");
    };
    const stopAll = () => {
        // Clean up client-side state only; the backend job keeps running.
        stopRequestedRef.current = true;
        isAnalysisRunningRef.current = false;
        setStartTime(null);
        // Ideally call backend to cancel job
    };
    const continuePending = () => {
        // No-op in new architecture basically
    };
    // Trigger a full analysis run for the current symbol/template and start the timer
    const triggerAnalysis = async () => {
        const reqId = await triggerAnalysisRequest(unifiedSymbol, normalizedMarket || '', selectedTemplateId);
        if (reqId) {
            setRequestId(reqId);
            setStartTime(Date.now()); // Start timer
            // Reset records to empty or wait for poll?
            // Waiting for poll is safer to avoid flashing old data
            setAnalysisRecords([]);
        }
    };
    return {
        activeAnalysisConfig, // Exported
        analysisTypes,
        analysisStates,
        analysisRecords,
        currentAnalysisTask,
        triggerAnalysis,
        triggering,
        requestId,
        setRequestId,
        taskProgress,
        startTime,
        elapsedSeconds,
        completionProgress,
        totalElapsedMs,
        stopAll,
        continuePending,
        retryAnalysis,
        hasRunningTask,
        isAnalysisRunning: hasRunningTask, // Simplified
        selectedTemplateId, // Exported
        setSelectedTemplateId, // Exported
    };
}

View File

@ -14,8 +14,16 @@ const nextConfig = {
proxyTimeout: 300000, // 300 seconds (5 minutes) proxyTimeout: 300000, // 300 seconds (5 minutes)
}, },
// Optimize for Docker deployment only in production // Optimize for Docker deployment only in production
// 当 NODE_ENV 为 production 时开启 standalone 模式
output: process.env.NODE_ENV === 'production' ? 'standalone' : undefined, output: process.env.NODE_ENV === 'production' ? 'standalone' : undefined,
async rewrites() {
return [
{
source: '/api/:path*',
destination: 'http://api-gateway:4000/v1/:path*',
},
];
},
}; };
export default nextConfig; export default nextConfig;

View File

@ -1,29 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies GET /api/analysis-results?symbol=... to the API gateway.
 *
 * - 500 when no backend base URL is configured.
 * - 400 when the `symbol` query parameter is missing.
 * - 404 from the backend is mapped to an empty JSON list so the UI doesn't error.
 * - Fix: network failures (fetch rejection) previously escaped unhandled;
 *   they now return a 502 JSON body, consistent with the other proxy routes.
 */
export async function GET(req: NextRequest) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const { searchParams } = new URL(req.url);
    const symbol = searchParams.get('symbol');
    if (!symbol) {
        return new Response('Missing symbol parameter', { status: 400 });
    }
    try {
        const resp = await fetch(`${BACKEND_BASE}/analysis-results?symbol=${encodeURIComponent(symbol)}`, { cache: 'no-store' });
        if (!resp.ok) {
            if (resp.status === 404) {
                // Return empty list if not found, to avoid UI errors
                return Response.json([]);
            }
            return new Response(resp.statusText, { status: resp.status });
        }
        const data = await resp.json();
        return Response.json(data);
    } catch (e: any) {
        // Surface connection failures as 502 JSON, matching the sibling config routes
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}

View File

@ -1,23 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies GET /api/companies/[symbol]/profile to the API gateway, streaming the
 * backend response body through and forwarding selected headers
 * (content-type, cache-control, x-accel-buffering).
 */
export async function GET(
    _req: Request,
    context: { params: Promise<{ symbol: string }> }
) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    // Next.js 15 route handlers expose params as a Promise
    const { symbol } = await context.params;
    const target = `${BACKEND_BASE}/companies/${encodeURIComponent(symbol)}/profile`;
    const resp = await fetch(target, { headers: { 'Content-Type': 'application/json' } });
    const headers = new Headers();
    const contentType = resp.headers.get('content-type') || 'application/json; charset=utf-8';
    headers.set('content-type', contentType);
    const cacheControl = resp.headers.get('cache-control');
    if (cacheControl) headers.set('cache-control', cacheControl);
    const xAccelBuffering = resp.headers.get('x-accel-buffering');
    if (xAccelBuffering) headers.set('x-accel-buffering', xAccelBuffering);
    // Stream the backend body straight through without buffering
    return new Response(resp.body, { status: resp.status, headers });
}

View File

@ -1,75 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

// Aggregates the new backend's configs into one stable entry point for legacy frontend call sites.
/**
 * GET: fetches llm_providers and analysis_modules configs in parallel and
 * returns them as one `{ llm_providers, analysis_modules }` object.
 * Unparseable/empty bodies degrade to `{}` rather than failing the request.
 */
export async function GET() {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const [providersResp, modulesResp] = await Promise.all([
            fetch(`${BACKEND_BASE}/configs/llm_providers`, { cache: 'no-store' }),
            fetch(`${BACKEND_BASE}/configs/analysis_modules`, { cache: 'no-store' }),
        ]);
        const providersText = await providersResp.text();
        const modulesText = await modulesResp.text();
        let providers: unknown = {};
        let modules: unknown = {};
        // Tolerate invalid JSON from the backend: fall back to empty objects
        try { providers = providersText ? JSON.parse(providersText) : {}; } catch { providers = {}; }
        try { modules = modulesText ? JSON.parse(modulesText) : {}; } catch { modules = {}; }
        return Response.json({
            llm_providers: providers,
            analysis_modules: modules,
        });
    } catch (e: any) {
        return new Response(e?.message || 'Failed to load config', { status: 502 });
    }
}
// Allows the frontend to submit partial config in one request; routes by key to the new backend.
/**
 * PUT: accepts `{ llm_providers?, analysis_modules? }`, forwards each present
 * section to its backend endpoint, and on full success returns the freshly
 * re-fetched aggregate. Any sub-request failure yields 502 with details.
 */
export async function PUT(req: NextRequest) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        // Malformed JSON body degrades to an empty object (no-op update)
        const incoming = await req.json().catch(() => ({}));
        const tasks: Promise<Response>[] = [];
        if (incoming.llm_providers) {
            tasks.push(fetch(`${BACKEND_BASE}/configs/llm_providers`, {
                method: 'PUT',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(incoming.llm_providers),
            }));
        }
        if (incoming.analysis_modules) {
            tasks.push(fetch(`${BACKEND_BASE}/configs/analysis_modules`, {
                method: 'PUT',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(incoming.analysis_modules),
            }));
        }
        const results = await Promise.all(tasks);
        const ok = results.every(r => r.ok);
        if (!ok) {
            const texts = await Promise.all(results.map(r => r.text().catch(() => '')));
            return new Response(JSON.stringify({ error: 'Partial update failed', details: texts }), {
                status: 502,
                headers: { 'Content-Type': 'application/json' },
            });
        }
        // Return the freshest aggregate after a successful update
        const [providersResp, modulesResp] = await Promise.all([
            fetch(`${BACKEND_BASE}/configs/llm_providers`, { cache: 'no-store' }),
            fetch(`${BACKEND_BASE}/configs/analysis_modules`, { cache: 'no-store' }),
        ]);
        const providers = await providersResp.json().catch(() => ({}));
        const modules = await modulesResp.json().catch(() => ({}));
        return Response.json({
            llm_providers: providers,
            analysis_modules: modules,
        });
    } catch (e: any) {
        return new Response(e?.message || 'Failed to update config', { status: 502 });
    }
}

View File

@ -1,45 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies config connectivity tests to the API gateway.
 * Expects a JSON body of shape `{ type, data }`; forwards `{ type, ...data }`
 * to `<gateway>/configs/test` and relays the backend response verbatim.
 */
export async function POST(req: NextRequest) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const body = await req.json();
        const { type, data } = body;
        if (!type || !data) {
            return new Response('请求体必须包含 type 和 data', { status: 400 });
        }
        // Forward the request to the API Gateway
        const targetUrl = `${BACKEND_BASE.replace(/\/$/, '')}/configs/test`;
        const backendRes = await fetch(targetUrl, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({ type, ...data }), // spread the data field when forwarding
        });
        const backendResBody = await backendRes.text();
        return new Response(backendResBody, {
            status: backendRes.status,
            headers: {
                'Content-Type': 'application/json',
            },
        });
    } catch (error: any) {
        console.error('配置测试代理失败:', error);
        return new Response(JSON.stringify({ success: false, message: error.message || '代理请求时发生未知错误' }), {
            status: 500,
            headers: { 'Content-Type': 'application/json' },
        });
    }
}

View File

@ -1,34 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * GET: proxies the analysis_modules config from the API gateway, relaying
 * status and content type as-is.
 * NOTE(review): unlike the template-sets/data-sources siblings, there is no
 * try/catch here — a backend connection failure escapes unhandled. Confirm
 * whether that is intentional.
 */
export async function GET() {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const resp = await fetch(`${BACKEND_BASE}/configs/analysis_modules`, {
        headers: { 'Content-Type': 'application/json' },
        cache: 'no-store',
    });
    const text = await resp.text();
    return new Response(text, {
        status: resp.status,
        headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
    });
}
/**
 * PUT: forwards the raw request body to the gateway's analysis_modules config
 * endpoint and relays the backend response verbatim.
 */
export async function PUT(req: Request) {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    const resp = await fetch(`${BACKEND_BASE}/configs/analysis_modules`, {
        method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
        body,
    });
    const text = await resp.text();
    return new Response(text, {
        status: resp.status,
        headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
    });
}

View File

@ -1,45 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * GET: proxies the analysis_template_sets config from the API gateway.
 * Connection failures are returned as 502 JSON rather than escaping.
 */
export async function GET() {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/analysis_template_sets`, {
            headers: { 'Content-Type': 'application/json' },
            cache: 'no-store',
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}
/**
 * PUT: forwards the raw request body to the gateway's analysis_template_sets
 * endpoint; connection failures become 502 JSON.
 */
export async function PUT(req: Request) {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/analysis_template_sets`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body,
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}

View File

@ -1,45 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * GET: proxies the data_sources config from the API gateway.
 * Connection failures are returned as 502 JSON rather than escaping.
 */
export async function GET() {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/data_sources`, {
            headers: { 'Content-Type': 'application/json' },
            cache: 'no-store',
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}
/**
 * PUT: forwards the raw request body to the gateway's data_sources endpoint;
 * connection failures become 502 JSON.
 */
export async function PUT(req: Request) {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/data_sources`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body,
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}

View File

@ -1,41 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies LLM connectivity tests: forwards the JSON body to
 * `<gateway>/configs/llm/test` and relays the backend response verbatim.
 * Any failure (bad JSON, network error) is logged and returned as 500 JSON.
 */
export async function POST(req: NextRequest) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const body = await req.json();
        // Forward the request to the API Gateway
        const targetUrl = `${BACKEND_BASE.replace(/\/$/, '')}/configs/llm/test`;
        const backendRes = await fetch(targetUrl, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify(body),
        });
        const backendResBody = await backendRes.text();
        return new Response(backendResBody, {
            status: backendRes.status,
            headers: {
                'Content-Type': 'application/json',
            },
        });
    } catch (error: any) {
        console.error('LLM测试代理失败:', error);
        return new Response(JSON.stringify({ success: false, message: error.message || '代理请求时发生未知错误' }), {
            status: 500,
            headers: { 'Content-Type': 'application/json' },
        });
    }
}

View File

@ -1,44 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * GET: proxies the llm_providers config from the API gateway.
 * Connection failures are returned as 502 JSON rather than escaping.
 */
export async function GET() {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/llm_providers`, {
            headers: { 'Content-Type': 'application/json' },
            cache: 'no-store',
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}
/**
 * PUT: forwards the raw request body to the gateway's llm_providers endpoint;
 * connection failures become 502 JSON.
 */
export async function PUT(req: Request) {
    if (!BACKEND_BASE) {
        return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    try {
        const resp = await fetch(`${BACKEND_BASE}/configs/llm_providers`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body,
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}

View File

@ -1,22 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies POST /api/data-requests to the API gateway, forwarding the raw
 * request body and relaying the backend response verbatim.
 * NOTE(review): no try/catch — a network failure escapes unhandled; confirm
 * whether 502 handling like the config routes is wanted here.
 */
export async function POST(req: NextRequest) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    const resp = await fetch(`${BACKEND_BASE}/data-requests`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body,
    });
    const text = await resp.text();
    return new Response(text, {
        status: resp.status,
        headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
    });
}

View File

@ -1,27 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies GET /api/discover-models/[provider_id] to the API gateway.
 * Handles both Promise-style and plain route params for compatibility
 * across Next.js versions.
 */
export async function GET(
    _req: Request,
    context: any
) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const raw = context?.params;
    // params may be a Promise (newer Next.js) or a plain object — normalize
    const params = raw && typeof raw.then === 'function' ? await raw : raw;
    const provider_id = params?.provider_id as string | undefined;
    if (!provider_id) {
        return new Response('provider_id 缺失', { status: 400 });
    }
    const target = `${BACKEND_BASE}/discover-models/${encodeURIComponent(provider_id)}`;
    const resp = await fetch(target, {
        headers: { 'Content-Type': 'application/json' },
        cache: 'no-store',
    });
    const text = await resp.text();
    return new Response(text, {
        status: resp.status,
        headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
    });
}

View File

@ -1,26 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;

/**
 * Proxies POST /api/discover-models to the API gateway, forwarding the raw
 * body and relaying the backend response; connection failures become 502 JSON.
 */
export async function POST(req: Request) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const body = await req.text();
    try {
        const resp = await fetch(`${BACKEND_BASE}/discover-models`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body,
            cache: 'no-store',
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    } catch (e: any) {
        const errorBody = JSON.stringify({ message: e?.message || '连接后端失败' });
        return new Response(errorBody, { status: 502, headers: { 'Content-Type': 'application/json' } });
    }
}

View File

@ -1,195 +0,0 @@
import { NextRequest } from 'next/server';
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;
const FRONTEND_BASE = process.env.FRONTEND_INTERNAL_URL || 'http://localhost:3001';

/**
 * Catch-all adapter mapping legacy /api/financials/* URLs onto the new
 * API gateway. Dispatches on the slug segments:
 * - [market, symbol]                       -> financial statements + profile + latest analysis meta
 * - ['analysis-config']                    -> analysis_modules config proxy
 * - ['config']                             -> aggregated config via this frontend's own /api/config
 * - [market, symbol, 'analysis', t, 'stream'] -> poll analysis results and stream the content
 * Everything else returns `{}` to keep legacy callers from failing on JSON parse.
 */
export async function GET(
    req: NextRequest,
    context: { params: Promise<{ slug: string[] }> }
) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const url = new URL(req.url);
    const { slug } = await context.params;
    const first = slug?.[0];
    // 1. Match /api/financials/{market}/{symbol}
    // slug[0] = market (e.g., "cn" or "us")
    // slug[1] = symbol (e.g., "600519" or "AAPL")
    if (slug.length === 2 && first !== 'analysis-config' && first !== 'config') {
        const market = slug[0];
        const symbol = slug[1];
        // NOTE(review): `years` is read but never used below — confirm whether the
        // backend endpoint should receive it or whether it can be removed.
        const years = url.searchParams.get('years') || '10';
        // Fetch financials from backend
        // Corrected path to match new API Gateway route
        const metricsParam = url.searchParams.get('metrics') || '';
        const fetchUrl = `${BACKEND_BASE}/market-data/financial-statements/${encodeURIComponent(symbol)}` +
            (metricsParam ? `?metrics=${encodeURIComponent(metricsParam)}` : '');
        const finResp = await fetch(fetchUrl, { cache: 'no-store' });
        if (!finResp.ok) {
            if (finResp.status === 404) {
                return Response.json({}, { status: 200 }); // Return empty for now to not break UI
            }
            return new Response(finResp.statusText, { status: finResp.status });
        }
        const series = await finResp.json();
        // Transform to frontend expected format (BatchFinancialDataResponse)
        // We group by metric_name
        const groupedSeries: Record<string, any[]> = {};
        series.forEach((item: any) => {
            if (!groupedSeries[item.metric_name]) {
                groupedSeries[item.metric_name] = [];
            }
            groupedSeries[item.metric_name].push({
                period: item.period_date ? item.period_date.replace(/-/g, '') : null, // YYYY-MM-DD -> YYYYMMDD
                value: item.value,
                source: item.source
            });
        });
        // Fetch Company Profile to populate name/industry
        // Corrected path to match new API Gateway route
        const profileResp = await fetch(`${BACKEND_BASE}/companies/${encodeURIComponent(symbol)}/profile`, { cache: 'no-store' });
        let profileData: any = {};
        if (profileResp.ok) {
            profileData = await profileResp.json();
        }
        // Fetch Latest Analysis Result Metadata (to get template_id)
        // We search for the most recent analysis result for this symbol
        const analysisResp = await fetch(`${BACKEND_BASE}/analysis-results?symbol=${encodeURIComponent(symbol)}`, { cache: 'no-store' });
        let meta: any = {
            symbol: symbol,
            generated_at: new Date().toISOString(), // Fallback
            template_id: null // Explicitly null if not found
        };
        if (analysisResp.ok) {
            const analysisList = await analysisResp.json();
            if (Array.isArray(analysisList) && analysisList.length > 0) {
                // Sort by created_at desc (backend should already do this, but to be safe)
                // Backend returns sorted by created_at DESC
                const latest = analysisList[0];
                meta.template_id = latest.template_id || null;
                meta.generated_at = latest.created_at;
            }
        }
        const responsePayload = {
            name: profileData.name || symbol,
            symbol: symbol,
            market: market,
            series: groupedSeries,
            meta: meta
        };
        return Response.json(responsePayload);
    }
    // Adapt legacy endpoint: analysis-config -> new analysis-modules config
    if (first === 'analysis-config') {
        const resp = await fetch(`${BACKEND_BASE}/configs/analysis_modules`, { cache: 'no-store' });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    }
    // Adapt legacy endpoint: config -> aggregated config (served by this frontend itself)
    if (first === 'config') {
        const resp = await fetch(`${FRONTEND_BASE}/api/config`, { cache: 'no-store' });
        const text = await resp.text();
        return new Response(text, { status: resp.status, headers: { 'Content-Type': 'application/json' } });
    }
    // 2. Match /api/financials/{market}/{symbol}/analysis/{type}/stream
    // slug length = 5
    // slug[0] = market
    // slug[1] = symbol
    // slug[2] = 'analysis'
    // slug[3] = analysisType (module_id)
    // slug[4] = 'stream'
    if (slug.length === 5 && slug[2] === 'analysis' && slug[4] === 'stream') {
        const symbol = slug[1];
        const analysisType = slug[3];
        const encoder = new TextEncoder();
        const stream = new ReadableStream({
            async start(controller) {
                // Polling logic
                // We try for up to 60 seconds (30 polls x 2s sleep)
                const maxRetries = 30;
                let found = false;
                for (let i = 0; i < maxRetries; i++) {
                    try {
                        const resp = await fetch(`${BACKEND_BASE}/analysis-results?symbol=${encodeURIComponent(symbol)}&module_id=${encodeURIComponent(analysisType)}`, { cache: 'no-store' });
                        if (resp.ok) {
                            const results = await resp.json();
                            // Assuming results are sorted by created_at DESC (backend behavior)
                            if (Array.isArray(results) && results.length > 0) {
                                const latest = results[0];
                                // If result is found, send it and exit
                                if (latest && latest.content) {
                                    controller.enqueue(encoder.encode(latest.content));
                                    found = true;
                                    break;
                                }
                            }
                        }
                    } catch (e) {
                        console.error("Error polling analysis results", e);
                    }
                    // Wait 2 seconds before next poll
                    await new Promise(resolve => setTimeout(resolve, 2000));
                }
                controller.close();
            }
        });
        return new Response(stream, {
            headers: {
                'Content-Type': 'text/plain; charset=utf-8',
                'Cache-Control': 'no-cache',
                'Connection': 'keep-alive'
            }
        });
    }
    // Other legacy financials endpoints are not implemented in the new architecture:
    // return an empty object to avoid frontend JSON-parse errors
    return Response.json({}, { status: 200 });
}
/**
 * PUT: only the legacy 'analysis-config' path is supported — forwards the raw
 * body to the gateway's analysis_modules endpoint; everything else is 404.
 */
export async function PUT(
    req: NextRequest,
    context: { params: Promise<{ slug: string[] }> }
) {
    if (!BACKEND_BASE) {
        return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
    }
    const { slug } = await context.params;
    const first = slug?.[0];
    if (first === 'analysis-config') {
        const body = await req.text();
        const resp = await fetch(`${BACKEND_BASE}/configs/analysis_modules`, {
            method: 'PUT',
            headers: { 'Content-Type': 'application/json' },
            body,
        });
        const text = await resp.text();
        return new Response(text, {
            status: resp.status,
            headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' },
        });
    }
    return new Response('Not Found', { status: 404 });
}

View File

@ -1,49 +0,0 @@
import { NextRequest } from 'next/server'
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;
// GET /api/analysis-results/[id]
// Resolves the result id (dynamic route params first, last URL segment as a
// fallback), proxies the lookup to the backend, and adapts the backend DTO to
// the legacy frontend shape (generated_at -> createdAt).
export async function GET(
  req: NextRequest,
  context: { params: Promise<{ id: string }> }
) {
  // Prefer the id from the dynamic route params (a Promise in newer Next.js).
  let id: string | undefined;
  try {
    id = (await context.params).id;
  } catch {
    // params promise rejected — fall through to URL parsing below
  }
  if (!id) {
    const lastSegment = new URL(req.url).pathname.split('/').pop();
    id = lastSegment || undefined;
  }
  if (!id) {
    return Response.json({ error: 'missing id' }, { status: 400 });
  }
  if (!BACKEND_BASE) {
    return new Response('NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
  }

  const resp = await fetch(`${BACKEND_BASE}/analysis-results/${encodeURIComponent(id)}`);
  const text = await resp.text();
  if (!resp.ok) {
    return new Response(text || 'not found', { status: resp.status });
  }

  // Adapt the backend DTO field names to the old frontend structure.
  try {
    const dto = JSON.parse(text);
    return Response.json({
      id: dto.id,
      symbol: dto.symbol,
      createdAt: dto.generated_at || dto.generatedAt || null,
      content: dto.content,
      module_id: dto.module_id,
      model_name: dto.model_name,
      meta_data: dto.meta_data,
    });
  } catch {
    return Response.json({ error: 'invalid response from backend' }, { status: 502 });
  }
}

View File

@ -1,13 +0,0 @@
export const runtime = 'nodejs'
import { NextRequest } from 'next/server'
export async function GET(req: NextRequest) {
  // In the new architecture the historical report list is served by the
  // backend persistence service, and the gateway has no "list all" endpoint
  // (it requires a symbol filter) — so this route returns an empty page.
  const emptyPage = { items: [], total: 0 };
  return Response.json(emptyPage, { status: 200 });
}
export async function POST(req: NextRequest) {
  // Report persistence is owned by the backend pipeline/services in the new
  // architecture; direct creation through this route is intentionally disabled.
  const payload = { error: 'Not implemented: creation is handled by backend pipeline' };
  return Response.json(payload, { status: 501 });
}

View File

@ -1,23 +0,0 @@
const BACKEND_BASE = process.env.BACKEND_INTERNAL_URL || process.env.NEXT_PUBLIC_BACKEND_URL;
// GET /api/tasks/[request_id]
// Streams the backend task endpoint through unchanged, preserving the headers
// relevant for streaming responses (content-type, cache-control,
// x-accel-buffering) when the backend sets them.
export async function GET(
  _req: Request,
  context: { params: Promise<{ request_id: string }> }
) {
  if (!BACKEND_BASE) {
    return new Response('BACKEND_INTERNAL_URL/NEXT_PUBLIC_BACKEND_URL 未配置', { status: 500 });
  }
  const { request_id } = await context.params;
  const upstream = await fetch(`${BACKEND_BASE}/tasks/${encodeURIComponent(request_id)}`, {
    headers: { 'Content-Type': 'application/json' },
  });

  const headers = new Headers();
  headers.set('content-type', upstream.headers.get('content-type') || 'application/json; charset=utf-8');
  // Copy optional streaming-related headers through only when present.
  for (const name of ['cache-control', 'x-accel-buffering']) {
    const value = upstream.headers.get(name);
    if (value) headers.set(name, value);
  }
  return new Response(upstream.body, { status: upstream.status, headers });
}

View File

@ -6,15 +6,53 @@ import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Loader2 } from "lucide-react";
export default function StockInputForm() { export default function StockInputForm() {
const [symbol, setSymbol] = useState(''); const [symbol, setSymbol] = useState('');
const [market, setMarket] = useState('china'); const [market, setMarket] = useState('china');
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState('');
const router = useRouter(); const router = useRouter();
const handleSearch = () => { const handleSearch = async (e?: React.FormEvent) => {
if (symbol.trim()) { if (e) {
router.push(`/report/${symbol.trim()}?market=${market}`); e.preventDefault();
}
if (!symbol.trim()) return;
setIsLoading(true);
setError('');
try {
// 1. 调用后端进行 Symbol 归一化,但不启动工作流
const response = await fetch('/api/tools/resolve-symbol', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
symbol: symbol.trim(),
market: market,
}),
});
if (!response.ok) {
const errText = await response.text();
throw new Error(errText || '解析股票代码失败');
}
const data = await response.json();
// data 结构: { symbol: string, market: string }
// 2. 跳转到报告页面,仅携带归一化后的 Symbol
// 此时并没有 request_id所以详情页不会自动开始而是等待用户点击
router.push(`/report/${encodeURIComponent(data.symbol)}?market=${data.market}`);
} catch (err) {
console.error(err);
setError(err instanceof Error ? err.message : '操作失败,请重试');
setIsLoading(false);
} }
}; };
@ -25,18 +63,20 @@ export default function StockInputForm() {
<CardTitle></CardTitle> <CardTitle></CardTitle>
<CardDescription></CardDescription> <CardDescription></CardDescription>
</CardHeader> </CardHeader>
<CardContent className="space-y-4"> <CardContent>
<form onSubmit={handleSearch} className="space-y-4">
<div className="space-y-2"> <div className="space-y-2">
<label></label> <label></label>
<Input <Input
placeholder="例如: 600519.SH 或 AAPL" placeholder="例如: 600519 或 AAPL"
value={symbol} value={symbol}
onChange={(e) => setSymbol(e.target.value)} onChange={(e) => setSymbol(e.target.value)}
disabled={isLoading}
/> />
</div> </div>
<div className="space-y-2"> <div className="space-y-2">
<label></label> <label></label>
<Select value={market} onValueChange={setMarket}> <Select value={market} onValueChange={setMarket} disabled={isLoading}>
<SelectTrigger> <SelectTrigger>
<SelectValue /> <SelectValue />
</SelectTrigger> </SelectTrigger>
@ -48,7 +88,24 @@ export default function StockInputForm() {
</SelectContent> </SelectContent>
</Select> </Select>
</div> </div>
<Button onClick={handleSearch} className="w-full"></Button>
{error && (
<div className="text-sm text-red-500 font-medium">
{error}
</div>
)}
<Button type="submit" className="w-full" disabled={isLoading || !symbol.trim()}>
{isLoading ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
...
</>
) : (
'生成报告'
)}
</Button>
</form>
</CardContent> </CardContent>
</Card> </Card>
</div> </div>

View File

@ -0,0 +1,142 @@
import React, { useState, useEffect } from 'react';
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Tabs, TabsList, TabsTrigger, TabsContent } from "@/components/ui/tabs";
import { ScrollArea } from "@/components/ui/scroll-area";
import { Badge } from "@/components/ui/badge";
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { TaskStatus } from '@/types/workflow';
import { AnalysisModuleConfig } from '@/types/index';
import { BrainCircuit, Terminal } from 'lucide-react';
// Props for the tabbed per-module analysis panel.
interface AnalysisModulesViewProps {
  // Lifecycle state per task, keyed by task id. Task ids are assumed to match
  // module ids directly (exact-match heuristic used by the component).
  taskStates: Record<string, TaskStatus>;
  // Raw markdown output per task, keyed by task id; rendered via ReactMarkdown.
  taskOutputs: Record<string, string>;
  // Analysis module configuration keyed by module id; its keys drive which
  // tabs are rendered.
  modulesConfig: Record<string, AnalysisModuleConfig>;
}
// Tabbed view over the analysis modules defined in the template config.
// Each tab shows the streamed markdown output of the matching task; task ids
// in the DAG are assumed to equal module ids (exact-match heuristic).
export function AnalysisModulesView({
  taskStates,
  taskOutputs,
  modulesConfig
}: AnalysisModulesViewProps) {
  // Memoize the id list so its identity only changes when the config object
  // changes. The original code rebuilt the array every render, which made the
  // effect below re-run on every render because of its `moduleIds` dependency.
  const moduleIds = React.useMemo(() => Object.keys(modulesConfig), [modulesConfig]);

  const [activeModuleId, setActiveModuleId] = useState<string>(moduleIds[0] || '');

  useEffect(() => {
    // Auto-select the first module when nothing is selected yet, or when the
    // currently active module no longer exists (e.g. the config was reloaded
    // with a different set of modules).
    if (moduleIds.length > 0 && (!activeModuleId || !moduleIds.includes(activeModuleId))) {
      setActiveModuleId(moduleIds[0]);
    }
  }, [moduleIds, activeModuleId]);

  if (moduleIds.length === 0) {
    return (
      <div className="flex flex-col items-center justify-center h-[300px] border-dashed border-2 rounded-lg text-muted-foreground">
        <BrainCircuit className="w-10 h-10 mb-2 opacity-50" />
        <p>No analysis modules defined in this template.</p>
      </div>
    );
  }

  return (
    <div className="space-y-4">
      <Tabs value={activeModuleId} onValueChange={setActiveModuleId} className="w-full">
        <div className="overflow-x-auto pb-2">
          <TabsList className="w-full justify-start h-auto p-1 bg-transparent gap-2">
            {moduleIds.map(moduleId => {
              const config = modulesConfig[moduleId];
              // Task id is assumed to equal the module id (exact match).
              const taskId = moduleId;
              const status = taskStates[taskId] || 'pending';
              return (
                <TabsTrigger
                  key={moduleId}
                  value={moduleId}
                  className="data-[state=active]:bg-primary data-[state=active]:text-primary-foreground px-4 py-2 rounded-md border bg-card hover:bg-accent/50 transition-all"
                >
                  <div className="flex items-center gap-2">
                    <span>{config.name}</span>
                    <StatusDot status={status} />
                  </div>
                </TabsTrigger>
              );
            })}
          </TabsList>
        </div>

        {moduleIds.map(moduleId => {
          const taskId = moduleId;
          const output = taskOutputs[taskId] || '';
          const status = taskStates[taskId] || 'pending';
          const config = modulesConfig[moduleId];
          return (
            <TabsContent key={moduleId} value={moduleId} className="mt-0">
              <Card className="h-[600px] flex flex-col">
                <CardHeader className="py-4 border-b bg-muted/5">
                  <div className="flex items-center justify-between">
                    <div className="flex items-center gap-3">
                      <CardTitle className="text-lg">{config.name}</CardTitle>
                      <Badge variant="outline" className="font-mono text-xs">
                        {config.model_id}
                      </Badge>
                    </div>
                    <StatusBadge status={status} />
                  </div>
                </CardHeader>
                <CardContent className="flex-1 p-0 min-h-0 relative">
                  <ScrollArea className="h-full p-6">
                    {output ? (
                      <div className="prose dark:prose-invert max-w-none pb-10">
                        <ReactMarkdown remarkPlugins={[remarkGfm]}>
                          {output}
                        </ReactMarkdown>
                      </div>
                    ) : (
                      <div className="flex flex-col items-center justify-center h-full text-muted-foreground gap-2 opacity-50">
                        <Terminal className="w-8 h-8" />
                        <p>{status === 'running' ? 'Generating analysis...' : 'Waiting for input...'}</p>
                      </div>
                    )}
                  </ScrollArea>
                </CardContent>
              </Card>
            </TabsContent>
          );
        })}
      </Tabs>
    </div>
  );
}
// Small colored indicator dot shown inside each module tab trigger.
function StatusDot({ status }: { status: TaskStatus }) {
  // Map each known state to its indicator colour; anything else stays muted.
  const colorByStatus: Record<string, string> = {
    completed: "bg-green-500",
    failed: "bg-red-500",
    running: "bg-blue-500 animate-pulse",
  };
  const colorClass = colorByStatus[status] ?? "bg-muted";
  return <div className={`w-2 h-2 rounded-full ${colorClass}`} />;
}
// Full-text status pill shown in the tab panel header.
function StatusBadge({ status }: { status: TaskStatus }) {
  if (status === 'completed') {
    return <Badge variant="outline" className="text-green-600 border-green-200 bg-green-50">Completed</Badge>;
  }
  if (status === 'failed') {
    return <Badge variant="destructive">Failed</Badge>;
  }
  if (status === 'running') {
    return <Badge variant="secondary" className="text-blue-600 bg-blue-50 animate-pulse">Generating...</Badge>;
  }
  // Everything else (including 'pending') renders as a neutral badge.
  return <Badge variant="outline">Pending</Badge>;
}

View File

@ -0,0 +1,50 @@
import React from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { AnalysisResultDto, AnalysisModuleConfig } from '@/types';
import { Card, CardContent, CardHeader, CardTitle, CardDescription } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
// Props for the single-module analysis result viewer.
interface AnalysisViewerProps {
  // Persisted analysis result for this module; undefined while still waiting.
  result?: AnalysisResultDto;
  // Module display/config metadata (name, model_id, provider_id are rendered).
  config: AnalysisModuleConfig;
  // Whether this viewer's tab is active; currently unused by the component
  // (it is destructured to `_isActive` and ignored).
  isActive: boolean;
}
export function AnalysisViewer({ result, config, isActive: _isActive }: AnalysisViewerProps) {
if (!result) {
return (
<div className="flex flex-col items-center justify-center h-[300px] text-muted-foreground border rounded-lg border-dashed gap-2">
<p>Waiting for analysis...</p>
<Badge variant="outline">{config.name}</Badge>
</div>
);
}
return (
<Card className="border-none shadow-none">
<CardHeader className="px-0 pt-0">
<div className="flex items-center justify-between">
<CardTitle className="text-lg">{config.name}</CardTitle>
<Badge variant="secondary" className="font-mono text-xs">
{config.model_id}
</Badge>
</div>
<CardDescription>
Generated by {config.provider_id}
</CardDescription>
</CardHeader>
<CardContent className="px-0">
<div className="prose prose-sm dark:prose-invert max-w-none">
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{result.content}
</ReactMarkdown>
{/* Simple cursor effect if we think it's still streaming (we don't have explicit stream status per module here easily without more props, but this is fine for now) */}
{/* You could add a blinking cursor here if needed, but maybe overkill if we don't know for sure if it's done */}
</div>
</CardContent>
</Card>
);
}

View File

@ -1,569 +1,99 @@
import { useMemo } from 'react'; import React, { useMemo } from 'react';
import { Spinner } from '@/components/ui/spinner'; import {
import { CheckCircle, XCircle } from 'lucide-react'; Table,
import { Table, TableHeader, TableBody, TableHead, TableRow, TableCell } from '@/components/ui/table'; TableBody,
import { formatReportPeriod } from '@/lib/financial-utils'; TableCell,
import { numberFormatter, integerFormatter } from '../utils'; TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import { transformFinancialData, TimeSeriesFinancialDto } from '@/lib/financial-data-transformer';
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Loader2, AlertCircle } from "lucide-react";
interface FinancialTableProps { interface FinancialTableProps {
financials: any; data: unknown[];
isLoading: boolean; status: 'idle' | 'fetching' | 'complete' | 'error';
error: any;
financialConfig: any;
} }
export function FinancialTable({ financials, isLoading, error, financialConfig }: FinancialTableProps) { export function FinancialTable({ data, status }: FinancialTableProps) {
// 创建 tushareParam 到 displayText 的映射 const tableData = useMemo(() => {
const metricDisplayMap = useMemo(() => { if (!Array.isArray(data)) return { headers: [], rows: [] };
if (!financialConfig?.api_groups) return {}; return transformFinancialData(data as TimeSeriesFinancialDto[]);
}, [data]);
const map: Record<string, string> = {}; if (status === 'idle') {
const groups = Object.values((financialConfig as any).api_groups || {}) as any[][]; return (
groups.forEach((metrics) => { <div className="flex items-center justify-center h-[200px] text-muted-foreground border rounded-lg border-dashed">
(metrics || []).forEach((metric: any) => { Waiting to start analysis...
if (metric.tushareParam && metric.displayText) { </div>
map[metric.tushareParam] = metric.displayText; );
} }
});
});
return map;
}, [financialConfig]);
const metricGroupMap = useMemo(() => { if (status === 'fetching') {
if (!financialConfig?.api_groups) return {} as Record<string, string>; return (
const map: Record<string, string> = {}; <div className="flex flex-col items-center justify-center h-[300px] text-muted-foreground border rounded-lg gap-4">
const entries = Object.entries((financialConfig as any).api_groups || {}) as [string, any[]][]; <Loader2 className="h-8 w-8 animate-spin" />
entries.forEach(([groupName, metrics]) => { <p>Fetching financial data from Tushare, YFinance, and AlphaVantage...</p>
(metrics || []).forEach((metric: any) => { </div>
if (metric.tushareParam) { );
map[metric.tushareParam] = groupName; }
if (status === 'error') {
return (
<div className="flex items-center justify-center h-[200px] text-destructive border rounded-lg border-destructive/50 bg-destructive/10 gap-2">
<AlertCircle className="h-5 w-5" />
<span>Error fetching data. Please try again.</span>
</div>
);
}
if (tableData.rows.length === 0) {
return (
<div className="flex items-center justify-center h-[200px] text-muted-foreground border rounded-lg">
No financial data available.
</div>
);
} }
});
});
return map;
}, [financialConfig]);
return ( return (
<div className="space-y-4"> <Card>
<h2 className="text-lg font-medium"></h2> <CardHeader>
<div className="flex items-center gap-3 text-sm"> <CardTitle>Financial Statements</CardTitle>
{isLoading ? ( </CardHeader>
<Spinner className="size-4" /> <CardContent>
) : error ? ( <div className="rounded-md border">
<XCircle className="size-4 text-red-500" /> <Table>
) : (
<CheckCircle className="size-4 text-green-600" />
)}
<div className="text-muted-foreground">
{isLoading ? '正在读取数据…' : error ? '读取失败' : '读取完成'}
</div>
</div>
{error && <p className="text-red-500"></p>}
{isLoading && (
<div className="flex items-center gap-2">
<span className="text-xs text-muted-foreground"></span>
<Spinner className="size-4" />
</div>
)}
{financials && (
<div className="overflow-x-auto">
{(() => {
const series = financials?.series ?? {};
// 统一 period优先 p.period若仅有 year 则映射到 `${year}1231`
const toPeriod = (p: any): string | null => {
if (!p) return null;
if (p.period) return String(p.period);
if (p.year) return `${p.year}1231`;
return null;
};
const displayedKeys = [
'roe', 'roa', 'roic', 'grossprofit_margin', 'netprofit_margin', 'revenue', 'tr_yoy', 'n_income',
'dt_netprofit_yoy', 'n_cashflow_act', 'c_pay_acq_const_fiolta', '__free_cash_flow',
'dividend_amount', 'repurchase_amount', 'total_assets', 'total_hldr_eqy_exc_min_int', 'goodwill',
'__sell_rate', '__admin_rate', '__rd_rate', '__other_fee_rate', '__tax_rate', '__depr_ratio',
'__money_cap_ratio', '__inventories_ratio', '__ar_ratio', '__prepay_ratio', '__fix_assets_ratio',
'__lt_invest_ratio', '__goodwill_ratio', '__other_assets_ratio', '__ap_ratio', '__adv_ratio',
'__st_borr_ratio', '__lt_borr_ratio', '__operating_assets_ratio', '__interest_bearing_debt_ratio',
'invturn_days', 'arturn_days', 'payturn_days', 'fa_turn', 'assets_turn',
'employees', '__rev_per_emp', '__profit_per_emp', '__salary_per_emp',
'close', 'total_mv', 'pe', 'pb', 'holder_num'
];
const displayedSeries = Object.entries(series)
.filter(([key]) => displayedKeys.includes(key))
.map(([, value]) => value);
const allPeriods = Array.from(
new Set(
(displayedSeries.flat() as any[])
.map((p) => toPeriod(p))
.filter((v): v is string => Boolean(v))
)
).sort((a, b) => b.localeCompare(a)); // 最新在左(按 YYYYMMDD 排序)
if (allPeriods.length === 0) {
return <p className="text-sm text-muted-foreground"></p>;
}
const periods = allPeriods.slice(0, 10);
const getValueByPeriod = (points: any[] | undefined, period: string): number | null => {
if (!points) return null;
const hit = points.find((pp) => toPeriod(pp) === period);
const v = hit?.value;
if (v == null) return null;
const num = typeof v === 'number' ? v : Number(v);
return Number.isFinite(num) ? num : null;
};
return (
<Table className="min-w-full text-sm">
<TableHeader> <TableHeader>
<TableRow> <TableRow>
<TableHead className="text-left p-2"></TableHead> <TableHead className="w-[200px]">Metric</TableHead>
{periods.map((p) => ( {tableData.headers.map(year => (
<TableHead key={p} className="text-right p-2">{formatReportPeriod(p)}</TableHead> <TableHead key={year} className="text-right">{year}</TableHead>
))} ))}
</TableRow> </TableRow>
</TableHeader> </TableHeader>
<TableBody> <TableBody>
{(() => { {tableData.rows.map((row, idx) => (
// 指定显示顺序tushareParam <TableRow key={idx}>
const ORDER: Array<{ key: string; label?: string }> = [ <TableCell className="font-medium">{row.metric}</TableCell>
{ key: 'roe' }, {tableData.headers.map(year => (
{ key: 'roa' }, <TableCell key={year} className="text-right">
{ key: 'roic' }, {row[year] !== undefined
{ key: 'grossprofit_margin' }, ? typeof row[year] === 'number'
{ key: 'netprofit_margin' }, ? (row[year] as number).toLocaleString(undefined, { maximumFractionDigits: 2 })
{ key: 'revenue' }, : row[year]
{ key: 'tr_yoy' }, : '-'}
{ key: 'n_income' }, </TableCell>
{ key: 'dt_netprofit_yoy' },
{ key: 'n_cashflow_act' },
{ key: 'c_pay_acq_const_fiolta' },
{ key: '__free_cash_flow', label: '自由现金流' },
{ key: 'dividend_amount', label: '分红' },
{ key: 'repurchase_amount', label: '回购' },
{ key: 'total_assets' },
{ key: 'total_hldr_eqy_exc_min_int' },
{ key: 'goodwill' },
];
// 在表格顶部插入"主要指标"行
const summaryRow = (
<TableRow key="__main_metrics_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))} ))}
</TableRow> </TableRow>
);
const PERCENT_KEYS = new Set([
'roe','roa','roic','grossprofit_margin','netprofit_margin','tr_yoy','dt_netprofit_yoy',
// Add all calculated percentage rows
'__sell_rate', '__admin_rate', '__rd_rate', '__other_fee_rate', '__tax_rate', '__depr_ratio',
'__money_cap_ratio', '__inventories_ratio', '__ar_ratio', '__prepay_ratio',
'__fix_assets_ratio', '__lt_invest_ratio', '__goodwill_ratio', '__other_assets_ratio',
'__ap_ratio', '__adv_ratio', '__st_borr_ratio', '__lt_borr_ratio',
'__operating_assets_ratio', '__interest_bearing_debt_ratio'
]);
const rows = ORDER.map(({ key, label }) => {
const points = series[key] as any[] | undefined;
return (
<TableRow key={key} className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">
{label || metricDisplayMap[key] || key}
</TableCell>
{periods.map((p) => {
const v = getValueByPeriod(points, p);
const groupName = metricGroupMap[key];
const rawNum = typeof v === 'number' ? v : (v == null ? null : Number(v));
if (rawNum == null || Number.isNaN(rawNum)) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
if (PERCENT_KEYS.has(key)) {
const perc = Math.abs(rawNum) <= 1 && key !== 'tax_to_ebt' && key !== '__tax_rate' ? rawNum * 100 : rawNum;
const text = Number.isFinite(perc) ? numberFormatter.format(perc) : '-';
const isGrowthRow = key === 'tr_yoy' || key === 'dt_netprofit_yoy';
if (isGrowthRow) {
const isNeg = typeof perc === 'number' && perc < 0;
const isHighGrowth = typeof perc === 'number' && perc > 30;
let content = `${text}%`;
if (key === 'dt_netprofit_yoy' && typeof perc === 'number' && perc > 1000) {
content = `${(perc / 100).toFixed(1)}x`;
}
let tableCellClassName = 'text-right p-2';
let spanClassName = 'italic';
if (isNeg) {
tableCellClassName += ' bg-red-100';
spanClassName += ' text-red-600';
} else if (isHighGrowth) {
tableCellClassName += ' bg-green-100';
spanClassName += ' text-green-800 font-bold';
} else {
spanClassName += ' text-blue-600';
}
return (
<TableCell key={p} className={tableCellClassName}>
<span className={spanClassName}>{content}</span>
</TableCell>
);
}
const isHighlighted = (key === 'roe' && typeof perc === 'number' && perc > 12.5) ||
(key === 'grossprofit_margin' && typeof perc === 'number' && perc > 35) ||
(key === 'netprofit_margin' && typeof perc === 'number' && perc > 15);
if (isHighlighted) {
return (
<TableCell key={p} className="text-right p-2 bg-green-100 text-green-800 font-bold">
{`${text}%`}
</TableCell>
);
}
return (
<TableCell key={p} className="text-right p-2">{`${text}%`}</TableCell>
);
} else {
const isFinGroup = groupName === 'income' || groupName === 'balancesheet' || groupName === 'cashflow';
const scaled = key === 'total_mv'
? rawNum / 10000
: (isFinGroup || key === '__free_cash_flow' || key === 'repurchase_amount' ? rawNum / 1e8 : rawNum);
const formatter = key === 'total_mv' ? integerFormatter : numberFormatter;
const text = Number.isFinite(scaled) ? formatter.format(scaled) : '-';
if (key === '__free_cash_flow') {
const isNeg = typeof scaled === 'number' && scaled < 0;
return (
<TableCell key={p} className="text-right p-2">
{isNeg ? <span className="text-red-600 bg-red-100">{text}</span> : text}
</TableCell>
);
}
return (
<TableCell key={p} className="text-right p-2">{text}</TableCell>
);
}
})}
</TableRow>
);
});
// =========================
// 费用指标分组
// =========================
const feeHeaderRow = (
<TableRow key="__fee_metrics_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))} ))}
</TableRow>
);
const feeRows = [
{ key: '__sell_rate', label: '销售费用率' },
{ key: '__admin_rate', label: '管理费用率' },
{ key: '__rd_rate', label: '研发费用率' },
{ key: '__other_fee_rate', label: '其他费用率' },
{ key: '__tax_rate', label: '所得税率' },
{ key: '__depr_ratio', label: '折旧费用占比' },
].map(({ key, label }) => (
<TableRow key={key} className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
{periods.map((p) => {
const points = series[key] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null || !Number.isFinite(v)) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
const rateText = numberFormatter.format(v);
const isNegative = v < 0;
return (
<TableCell key={p} className="text-right p-2">
{isNegative ? <span className="text-red-600 bg-red-100">{rateText}%</span> : `${rateText}%`}
</TableCell>
);
})}
</TableRow>
));
// =========================
// 资产占比分组
// =========================
const assetHeaderRow = (
<TableRow key="__asset_ratio_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))}
</TableRow>
);
const ratioCell = (value: number | null, keyStr: string) => {
if (value == null || !Number.isFinite(value)) {
return <TableCell key={keyStr} className="text-right p-2">-</TableCell>;
}
const text = numberFormatter.format(value);
const isNegative = value < 0;
const isHighRatio = value > 30;
let cellClassName = "text-right p-2";
if (isHighRatio) {
cellClassName += " bg-red-100";
} else if (isNegative) {
cellClassName += " bg-red-100";
}
return (
<TableCell key={keyStr} className={cellClassName}>
{isNegative ? <span className="text-red-600">{text}%</span> : `${text}%`}
</TableCell>
);
};
const assetRows = [
{ key: '__money_cap_ratio', label: '现金占比' },
{ key: '__inventories_ratio', label: '库存占比' },
{ key: '__ar_ratio', label: '应收款占比' },
{ key: '__prepay_ratio', label: '预付款占比' },
{ key: '__fix_assets_ratio', label: '固定资产占比' },
{ key: '__lt_invest_ratio', label: '长期投资占比' },
{ key: '__goodwill_ratio', label: '商誉占比' },
{ key: '__other_assets_ratio', label: '其他资产占比' },
{ key: '__ap_ratio', label: '应付款占比' },
{ key: '__adv_ratio', label: '预收款占比' },
{ key: '__st_borr_ratio', label: '短期借款占比' },
{ key: '__lt_borr_ratio', label: '长期借款占比' },
{ key: '__operating_assets_ratio', label: '运营资产占比' },
{ key: '__interest_bearing_debt_ratio', label: '有息负债率' },
].map(({ key, label }) => (
<TableRow key={key} className={`hover:bg-purple-100 ${key === '__other_assets_ratio' ? 'bg-yellow-50' : ''}`}>
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
{periods.map((p) => {
const points = series[key] as any[] | undefined;
const v = getValueByPeriod(points, p);
return ratioCell(v, p);
})}
</TableRow>
));
// =========================
// 周转能力分组
// =========================
const turnoverHeaderRow = (
<TableRow key="__turnover_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))}
</TableRow>
);
const turnoverItems: Array<{ key: string; label: string }> = [
{ key: 'invturn_days', label: '存货周转天数' },
{ key: 'arturn_days', label: '应收款周转天数' },
{ key: 'payturn_days', label: '应付款周转天数' },
{ key: 'fa_turn', label: '固定资产周转率' },
{ key: 'assets_turn', label: '总资产周转率' },
];
const turnoverRows = turnoverItems.map(({ key, label }) => (
<TableRow key={key} className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
{periods.map((p) => {
const points = series[key] as any[] | undefined;
const v = getValueByPeriod(points, p);
const value = typeof v === 'number' ? v : (v == null ? null : Number(v));
if (value == null || !Number.isFinite(value)) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
const text = numberFormatter.format(value);
if (key === 'arturn_days' && value > 90) {
return (
<TableCell key={p} className="text-right p-2 bg-red-100 text-red-600">{text}</TableCell>
);
}
return <TableCell key={p} className="text-right p-2">{text}</TableCell>;
})}
</TableRow>
));
return [
summaryRow,
...rows,
feeHeaderRow,
...feeRows,
assetHeaderRow,
...assetRows,
turnoverHeaderRow,
...turnoverRows,
// =========================
// 人均效率分组
// =========================
(
<TableRow key="__per_capita_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))}
</TableRow>
),
// 员工人数(整数千分位)
(
<TableRow key="__employees_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['employees'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{integerFormatter.format(Math.round(v))}</TableCell>;
})}
</TableRow>
),
// 人均创收 = 收入 / 员工人数(万元)
(
<TableRow key="__rev_per_emp_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['__rev_per_emp'] as any[] | undefined;
const val = getValueByPeriod(points, p);
if (val == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(val)}</TableCell>;
})}
</TableRow>
),
// 人均创利 = 净利润 / 员工人数(万元)
(
<TableRow key="__profit_per_emp_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['__profit_per_emp'] as any[] | undefined;
const val = getValueByPeriod(points, p);
if (val == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(val)}</TableCell>;
})}
</TableRow>
),
// 人均工资 = 支付给职工以及为职工支付的现金 / 员工人数(万元)
(
<TableRow key="__salary_per_emp_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['__salary_per_emp'] as any[] | undefined;
const val = getValueByPeriod(points, p);
if (val == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(val)}</TableCell>;
})}
</TableRow>
),
// =========================
// 市场表现分组
// =========================
(
<TableRow key="__market_perf_row" className="bg-muted hover:bg-purple-100">
<TableCell className="p-2 font-medium "></TableCell>
{periods.map((p) => (
<TableCell key={p} className="p-2"></TableCell>
))}
</TableRow>
),
// 股价(收盘价)
(
<TableRow key="__price_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['close'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(v)}</TableCell>;
})}
</TableRow>
),
// 市值按亿为单位显示乘以10000并整数千分位
(
<TableRow key="__market_cap_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">亿</TableCell>
{periods.map((p) => {
const points = series['total_mv'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
const scaled = v / 10000; // 转为亿元
return <TableCell key={p} className="text-right p-2">{integerFormatter.format(Math.round(scaled))}</TableCell>;
})}
</TableRow>
),
// PE
(
<TableRow key="__pe_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">PE</TableCell>
{periods.map((p) => {
const points = series['pe'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(v)}</TableCell>;
})}
</TableRow>
),
// PB
(
<TableRow key="__pb_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground">PB</TableCell>
{periods.map((p) => {
const points = series['pb'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{numberFormatter.format(v)}</TableCell>;
})}
</TableRow>
),
// 股东户数
(
<TableRow key="__holder_num_row" className="hover:bg-purple-100">
<TableCell className="p-2 text-muted-foreground"></TableCell>
{periods.map((p) => {
const points = series['holder_num'] as any[] | undefined;
const v = getValueByPeriod(points, p);
if (v == null) {
return <TableCell key={p} className="text-right p-2">-</TableCell>;
}
return <TableCell key={p} className="text-right p-2">{integerFormatter.format(Math.round(v))}</TableCell>;
})}
</TableRow>
),
];
})()}
</TableBody> </TableBody>
</Table> </Table>
);
})()}
</div> </div>
)} <div className="mt-4 text-xs text-muted-foreground">
* Data aggregated from multiple sources. Duplicate metrics from different sources are currently overwritten by the latest received.
</div> </div>
</CardContent>
</Card>
); );
} }

View File

@ -0,0 +1,90 @@
import React from 'react';
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { ScrollArea } from "@/components/ui/scroll-area";
import { Badge } from "@/components/ui/badge";
import { TaskStatus } from '@/types/workflow';
// Props for FundamentalDataView.
interface FundamentalDataViewProps {
  // Task id -> current status; only tasks with the "fetch:" id prefix are rendered here.
  taskStates: Record<string, TaskStatus>;
  // Task id -> raw output string produced by the task (typically JSON text).
  taskOutputs: Record<string, string>;
}
/**
 * Grid of per-provider cards showing the raw output of each data-fetch task.
 *
 * Only tasks whose id follows the standardized "fetch:<provider_id>" format
 * are shown; the provider label is derived from that id.
 */
export function FundamentalDataView({ taskStates, taskOutputs }: FundamentalDataViewProps) {
  const FETCH_PREFIX = 'fetch:';
  const fetchTaskIds = Object.keys(taskStates).filter((id) => id.startsWith(FETCH_PREFIX));

  if (fetchTaskIds.length === 0) {
    return (
      <div className="text-center p-8 text-muted-foreground">
        No data providers detected in this workflow.
      </div>
    );
  }

  return (
    <div className="grid grid-cols-1 md:grid-cols-2 gap-4">
      {fetchTaskIds.map((taskId) => {
        const status = taskStates[taskId];
        const output = taskOutputs[taskId];
        // "fetch:tushare" -> "Tushare": drop the prefix, capitalize the first letter.
        const providerId = taskId.slice(FETCH_PREFIX.length);
        const providerName = providerId.charAt(0).toUpperCase() + providerId.slice(1);
        const isWaiting = status === 'pending' || status === 'running';

        return (
          <Card key={taskId} className="flex flex-col h-[400px]">
            <CardHeader className="pb-2 flex flex-row items-center justify-between space-y-0">
              <CardTitle className="text-lg font-medium truncate" title={providerName}>
                {providerName}
              </CardTitle>
              <StatusBadge status={status} />
            </CardHeader>
            <CardContent className="flex-1 min-h-0 pt-2">
              <ScrollArea className="h-full w-full border rounded-md bg-muted/5 p-4">
                {output ? (
                  <pre className="text-xs font-mono whitespace-pre-wrap break-words text-foreground/80">
                    {tryFormatJson(output)}
                  </pre>
                ) : (
                  <div className="flex items-center justify-center h-full text-muted-foreground text-sm italic">
                    {isWaiting ? 'Waiting for data...' : 'No data returned'}
                  </div>
                )}
              </ScrollArea>
            </CardContent>
          </Card>
        );
      })}
    </div>
  );
}
/** Maps a TaskStatus to a small colored badge. */
function StatusBadge({ status }: { status: TaskStatus }) {
  if (status === 'completed') {
    return <Badge variant="default" className="bg-green-600">Success</Badge>;
  }
  if (status === 'failed') {
    return <Badge variant="destructive">Failed</Badge>;
  }
  if (status === 'running') {
    return <Badge variant="secondary" className="animate-pulse text-blue-500">Fetching</Badge>;
  }
  // Anything else (e.g. 'pending') is shown as a neutral placeholder.
  return <Badge variant="outline">Pending</Badge>;
}
/**
 * Pretty-prints `str` as 2-space-indented JSON when it looks like a JSON
 * object or array; otherwise (or on parse failure) returns the input
 * unchanged.
 */
function tryFormatJson(str: string): string {
  const trimmed = str.trim();
  const looksLikeObject = trimmed.startsWith('{') && trimmed.endsWith('}');
  const looksLikeArray = trimmed.startsWith('[') && trimmed.endsWith(']');
  // Skip parsing entirely for plain text so non-JSON output is untouched.
  if (!looksLikeObject && !looksLikeArray) {
    return str;
  }
  try {
    return JSON.stringify(JSON.parse(str), null, 2);
  } catch {
    // Malformed JSON: fall back to the raw string.
    return str;
  }
}

View File

@ -0,0 +1,203 @@
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { Badge } from "@/components/ui/badge";
import { ScrollArea } from "@/components/ui/scroll-area";
import { TaskProgress } from "@/types";
import { useState, useMemo } from "react";
import { ChevronDown, ChevronRight, CheckCircle2, XCircle, Loader2, AlertCircle } from "lucide-react";
import { cn } from "@/lib/utils";
// Props for RawDataViewer.
interface RawDataViewerProps {
  // Raw data records; each item is read for { provider, data_type, data_payload }
  // with item.source as a legacy fallback for the provider key.
  data: unknown[];
  // Execution-plan tasks; task_name format is "provider" or "provider:symbol".
  tasks: TaskProgress[];
  // Overall fetch lifecycle state for the whole request.
  status: 'idle' | 'fetching' | 'complete' | 'error';
}
/**
 * Grouped raw-data inspector: renders one collapsible section per data
 * provider, merging live task progress with the data records already
 * received for that provider.
 *
 * NOTE(review): tasks sharing a provider prefix (e.g. "tushare:A" and
 * "tushare:B") collapse into one map entry, last task wins — confirm
 * that is intended.
 */
export function RawDataViewer({ data, tasks, status }: RawDataViewerProps) {
  const groupedProviders = useMemo(() => {
    // 1. Group Data by Source
    const dataGroups: Record<string, unknown[]> = {};
    if (Array.isArray(data)) {
      data.forEach((item: any) => {
        // Normalize source: lowercase
        // In SessionDataDto, provider is the key. item.source is legacy/fallback.
        const source = (item.provider || item.source || 'unknown').toLowerCase();
        if (!dataGroups[source]) dataGroups[source] = [];
        // We push the whole item which contains { provider, data_type, data_payload }
        dataGroups[source].push(item);
      });
    }
    // 2. Map Tasks to Providers
    const providerMap = new Map<string, {
      name: string;
      status: string;
      details?: string;
      data: unknown[];
    }>();
    // Populate from Tasks first (as they represent the current execution plan)
    tasks.forEach(task => {
      // Task name format: "provider:symbol" or just "provider"
      const providerName = task.task_name.split(':')[0].toLowerCase();
      providerMap.set(providerName, {
        name: providerName,
        status: task.status,
        details: task.details,
        data: dataGroups[providerName] || []
      });
    });
    // Populate remaining data sources (historical or pre-fetched) that might not have a task
    Object.keys(dataGroups).forEach(source => {
      if (!providerMap.has(source)) {
        providerMap.set(source, {
          name: source,
          status: 'completed', // If we have data, assume it's done
          details: 'Data loaded from cache/db',
          data: dataGroups[source]
        });
      }
    });
    return Array.from(providerMap.values());
  }, [data, tasks]);

  // Nothing grouped yet and the request failed: dedicated error banner.
  if (groupedProviders.length === 0 && status === 'error') {
    return (
      <div className="flex items-center justify-center h-[100px] text-destructive border rounded-lg border-destructive/50 bg-destructive/10 gap-2">
        <AlertCircle className="h-5 w-5" />
        <span>Error fetching data.</span>
      </div>
    );
  }
  // Nothing grouped yet but still fetching: spinner placeholder.
  if (groupedProviders.length === 0 && status === 'fetching') {
    return (
      <div className="flex items-center justify-center h-[100px] text-muted-foreground gap-2">
        <Loader2 className="h-4 w-4 animate-spin" />
        <span>Waiting for providers...</span>
      </div>
    )
  }
  return (
    <div className="space-y-4">
      {groupedProviders.map(provider => (
        <ProviderSection
          key={provider.name}
          name={provider.name}
          status={provider.status}
          details={provider.details}
          data={provider.data}
        />
      ))}
      {/* Completed run that produced no providers at all. */}
      {groupedProviders.length === 0 && status === 'complete' && (
        <div className="text-center p-8 text-muted-foreground border rounded-lg border-dashed">
          No data providers found.
        </div>
      )}
    </div>
  );
}
/**
 * One collapsible provider panel: clickable header (name, status badge,
 * record count, optional progress details) toggling a scrollable list of
 * raw data records.
 */
function ProviderSection({
  name,
  status,
  details,
  data
}: {
  name: string;
  status: string;
  details?: string;
  data: unknown[]
}) {
  const [isOpen, setIsOpen] = useState(false);
  // Auto-open if there is an error to show details, otherwise keep closed to save space
  // or keep open if it's the only one? Let's default to closed but maybe open if it has interesting data?
  // User said "generally collapsed", so default false.
  return (
    <Card className={cn("transition-all", isOpen ? "ring-1 ring-primary/20" : "")}>
      <CardHeader
        className="py-3 px-4 cursor-pointer hover:bg-muted/50 transition-colors select-none"
        onClick={() => setIsOpen(!isOpen)}
      >
        <div className="flex items-center justify-between">
          <div className="flex items-center gap-3">
            {isOpen ? <ChevronDown className="h-4 w-4 text-muted-foreground"/> : <ChevronRight className="h-4 w-4 text-muted-foreground"/>}
            <div className="flex flex-col">
              <div className="flex items-center gap-2">
                <CardTitle className="text-base font-semibold capitalize leading-none">
                  {name}
                </CardTitle>
                <StatusBadge status={status} />
              </div>
            </div>
          </div>
          <div className="flex items-center gap-4 text-sm text-muted-foreground">
            <span className="hidden sm:inline-block text-xs bg-secondary px-2 py-1 rounded-md">
              {data.length} records
            </span>
          </div>
        </div>
        {/* Progress/Details Text */}
        {(details || status === 'in_progress') && (
          <div className="ml-7 mt-1 text-xs text-muted-foreground truncate max-w-[600px]">
            {details}
          </div>
        )}
      </CardHeader>
      {/* Body is only mounted while the section is expanded. */}
      {isOpen && (
        <CardContent className="pt-0 px-4 pb-4">
          <div className="mt-2 border rounded-md bg-muted/30">
            {data.length > 0 ? (
              <ScrollArea className="h-[300px] w-full p-4">
                {/* Show structured data if available */}
                <div className="space-y-4">
                  {data.map((item: any, idx) => (
                    <div key={idx} className="space-y-1">
                      <div className="flex items-center gap-2">
                        <Badge variant="outline" className="text-[10px] h-5">{item.data_type || 'unknown'}</Badge>
                        <span className="text-xs text-muted-foreground">{item.created_at || ''}</span>
                      </div>
                      <pre className="text-xs font-mono break-all whitespace-pre-wrap text-foreground/80 bg-background/50 p-2 rounded border">
                        {/* If item.data_payload exists, show it. Otherwise show item (legacy) */}
                        {JSON.stringify(item.data_payload !== undefined ? item.data_payload : item, null, 2)}
                      </pre>
                    </div>
                  ))}
                </div>
              </ScrollArea>
            ) : (
              <div className="p-8 text-center text-sm text-muted-foreground">
                {status === 'failed'
                  ? "No data generated due to failure."
                  : "No data records available yet."}
              </div>
            )}
          </div>
        </CardContent>
      )}
    </Card>
  );
}
/** Badge for a provider's free-form status string (matched case-insensitively). */
function StatusBadge({ status }: { status: string }) {
  switch (status.toLowerCase()) {
    case 'completed':
    case 'success':
      return <Badge variant="outline" className="bg-green-500/10 text-green-600 border-green-500/20 hover:bg-green-500/20 gap-1"><CheckCircle2 className="h-3 w-3"/> Success</Badge>;
    case 'failed':
    case 'error':
      return <Badge variant="destructive" className="gap-1"><XCircle className="h-3 w-3"/> Failed</Badge>;
    case 'in_progress':
    case 'running':
      return <Badge variant="secondary" className="gap-1 animate-pulse"><Loader2 className="h-3 w-3 animate-spin"/> Running</Badge>;
    default:
      // Unknown status strings are rendered verbatim.
      return <Badge variant="outline" className="text-muted-foreground">{status}</Badge>;
  }
}

View File

@ -0,0 +1,168 @@
import React, { useEffect, useState } from 'react';
import { Tabs, TabsList, TabsTrigger, TabsContent } from "@/components/ui/tabs";
import { Card, CardHeader, CardTitle } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Badge } from "@/components/ui/badge";
import { Loader2 } from "lucide-react";
import { AnalysisTemplateSets } from '@/types';
import { ReportState } from '@/types/report';
import { RawDataViewer } from './RawDataViewer';
import { FinancialTable } from './FinancialTable';
import { AnalysisViewer } from './AnalysisViewer';
import { StockChart } from './StockChart';
// Props for ReportLayout.
interface ReportLayoutProps {
  // Full report state driving the header, tabs, and analysis content.
  state: ReportState;
  // Called with the template id when the user picks a different template.
  onTemplateChange: (id: string) => void;
  // Called when the user presses "Start Analysis".
  onTrigger: () => void;
}
/**
 * Report page layout: header with symbol/status, template picker and
 * trigger button, plus tabbed chart / fundamental-data / analysis content.
 * Tab switching follows the fetch/analysis lifecycle automatically.
 */
export function ReportLayout({ state, onTemplateChange, onTrigger }: ReportLayoutProps) {
  const [templates, setTemplates] = useState<AnalysisTemplateSets>({});
  const [activeTab, setActiveTab] = useState("chart");

  // Load templates for dropdown (once, on mount).
  useEffect(() => {
    fetch('/api/configs/analysis_template_sets')
      .then(res => res.json())
      .then(data => setTemplates(data))
      .catch(err => console.error(err));
  }, []);

  // Auto-switch tabs based on state: jump to the data tab while fetching,
  // then to the first analysis module tab once analysis starts running.
  useEffect(() => {
    if (state.fetchStatus === 'fetching') {
      setActiveTab("fundamental");
    } else if (state.analysisStatus === 'running' && activeTab === 'fundamental') {
      // Try to switch to the first analysis tab if available
      const firstModule = state.templateConfig ? Object.keys(state.templateConfig.modules)[0] : null;
      if (firstModule) {
        setActiveTab(firstModule);
      }
    }
    // activeTab intentionally omitted: this effect only reacts to lifecycle changes.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [state.fetchStatus, state.analysisStatus, state.templateConfig]);

  return (
    <div className="container mx-auto p-4 space-y-4">
      {/* Header Area */}
      <Card>
        <CardHeader className="flex flex-row items-center justify-between space-y-0 pb-2">
          <div className="flex flex-col space-y-1">
            <CardTitle className="text-2xl font-bold">{state.symbol}</CardTitle>
            <div className="text-sm text-muted-foreground flex items-center gap-2">
              <span>Market: {state.market || 'Unknown'}</span>
              <span></span>
              <Badge variant={state.analysisStatus === 'running' ? "default" : state.analysisStatus === 'error' ? "destructive" : "secondary"}>
                {state.analysisStatus === 'running' ? 'Analyzing...' :
                 state.analysisStatus === 'error' ? 'Failed' :
                 state.fetchStatus === 'fetching' ? 'Fetching Data...' : 'Ready'}
              </Badge>
            </div>
          </div>
          <div className="flex items-center gap-4">
            <div className="w-[200px]">
              <Select value={state.templateId} onValueChange={onTemplateChange}>
                <SelectTrigger>
                  <SelectValue placeholder="Select Template" />
                </SelectTrigger>
                <SelectContent>
                  {Object.entries(templates).map(([id, tpl]) => (
                    <SelectItem key={id} value={id}>{tpl.name}</SelectItem>
                  ))}
                </SelectContent>
              </Select>
            </div>
            {/* Trigger is disabled without a template or while work is in flight. */}
            <Button
              onClick={onTrigger}
              disabled={!state.templateId || state.fetchStatus === 'fetching' || state.analysisStatus === 'running'}
            >
              {state.fetchStatus === 'fetching' || state.analysisStatus === 'running' ? (
                <Loader2 className="mr-2 h-4 w-4 animate-spin" />
              ) : null}
              Start Analysis
            </Button>
          </div>
        </CardHeader>
      </Card>
      {/* Main Content Area */}
      {/* Error Banner */}
      {(state.analysisStatus === 'error' || state.fetchStatus === 'error') && state.error && (
        <div className="bg-destructive/15 text-destructive text-sm p-3 rounded-md border border-destructive/20">
          <strong>Error:</strong> {state.error}
        </div>
      )}
      <Tabs value={activeTab} onValueChange={setActiveTab} className="w-full">
        <TabsList className="w-full justify-start overflow-x-auto">
          <TabsTrigger value="chart">Stock Chart</TabsTrigger>
          <TabsTrigger value="fundamental">Fundamental Data</TabsTrigger>
          {/* Dynamic Tabs from Template */}
          {state.templateConfig && Object.entries(state.templateConfig.modules).map(([moduleId, moduleConfig]) => (
            <TabsTrigger key={moduleId} value={moduleId} disabled={state.fetchStatus !== 'complete'}>
              {moduleConfig.name}
              {state.analysisStatus === 'running' && !state.analysisResults[moduleId] && (
                <Loader2 className="ml-2 h-3 w-3 animate-spin" />
              )}
            </TabsTrigger>
          ))}
        </TabsList>
        <div className="mt-4">
          {/* SCENARIO 1: Stock Chart */}
          <TabsContent value="chart" className="m-0">
            <StockChart symbol={state.symbol} />
          </TabsContent>
          {/* SCENARIO 2: Fundamental Data */}
          <TabsContent value="fundamental" className="m-0 space-y-8">
            {/* Always show the Provider/Task Grid (RawDataViewer) as it contains the per-provider status */}
            <RawDataViewer
              data={state.fundamentalData}
              tasks={state.tasks}
              status={state.fetchStatus}
            />
            {/* Show Aggregated Table only when complete */}
            {/* User request: Hide consolidated table temporarily to focus on raw provider data */}
            {/* {state.fetchStatus === 'complete' && (
              <div className="space-y-4">
                <div className="flex items-center justify-between">
                  <h3 className="text-lg font-semibold tracking-tight">Aggregated Financial Statements</h3>
                </div>
                <FinancialTable
                  data={state.fundamentalData}
                  status={state.fetchStatus}
                />
              </div>
            )} */}
          </TabsContent>
          {/* SCENARIO 3: Analysis Modules */}
          {state.templateConfig && Object.entries(state.templateConfig.modules).map(([moduleId, moduleConfig]) => (
            <TabsContent key={moduleId} value={moduleId} className="m-0">
              <AnalysisViewer
                result={state.analysisResults[moduleId]}
                config={moduleConfig}
                isActive={activeTab === moduleId}
              />
            </TabsContent>
          ))}
        </div>
      </Tabs>
      {/* Execution Details Footer */}
      <div className="text-xs text-muted-foreground border-t pt-2 flex justify-between">
        <span>Request ID: {state.requestId || '-'}</span>
        <span>
          Time: {(state.executionMeta.elapsed / 1000).toFixed(1)}s
          | Tokens: {state.executionMeta.tokens}
        </span>
      </div>
    </div>
  );
}

View File

@ -1,57 +1,20 @@
import { CheckCircle } from 'lucide-react'; import React from 'react';
import { Spinner } from '@/components/ui/spinner';
import { TradingViewWidget } from '@/components/TradingViewWidget'; import { TradingViewWidget } from '@/components/TradingViewWidget';
interface StockChartProps { interface StockChartProps {
unifiedSymbol: string; symbol: string;
marketParam: string;
realtime: any;
realtimeLoading: boolean;
realtimeError: any;
} }
export function StockChart({ export function StockChart({ symbol }: StockChartProps) {
unifiedSymbol, // Simple heuristic to detect market.
marketParam, // If 6 digits at start or ends with .SH/.SZ, it's likely China.
realtime, // Otherwise default to US (or let TradingView handle it).
realtimeLoading, const isChina = /^\d{6}/.test(symbol) || symbol.endsWith('.SH') || symbol.endsWith('.SZ');
realtimeError, const market = isChina ? 'china' : 'us';
}: StockChartProps) {
return ( return (
<div className="space-y-4"> <div className="h-[500px] w-full mt-4">
<h2 className="text-lg font-medium"> TradingView</h2> <TradingViewWidget symbol={symbol} market={market} height={500} />
<div className="flex items-center justify-between text-sm mb-4">
<div className="flex items-center gap-3">
<CheckCircle className="size-4 text-green-600" />
<div className="text-muted-foreground">
- {unifiedSymbol}
</div>
</div>
<div className="text-xs text-muted-foreground">
{realtimeLoading ? (
<span className="inline-flex items-center gap-2"><Spinner className="size-3" /> </span>
) : realtimeError ? (
<span className="text-red-500"></span>
) : (() => {
const priceRaw = realtime?.price;
const priceNum = typeof priceRaw === 'number' ? priceRaw : Number(priceRaw);
const tsRaw = realtime?.ts;
const tsDate = tsRaw == null ? null : new Date(typeof tsRaw === 'number' ? tsRaw : String(tsRaw));
const tsText = tsDate && !isNaN(tsDate.getTime()) ? `${tsDate.toLocaleString()}` : '';
if (Number.isFinite(priceNum)) {
return <span> {priceNum.toLocaleString()} {tsText}</span>;
}
return <span></span>;
})()}
</div>
</div>
<TradingViewWidget
symbol={unifiedSymbol}
market={marketParam}
height={500}
/>
</div> </div>
); );
} }

View File

@ -0,0 +1,275 @@
import React, { useEffect, useState } from 'react';
import { useRouter } from 'next/navigation'; // Import router
import { Tabs, TabsList, TabsTrigger, TabsContent } from "@/components/ui/tabs";
import { Card, CardHeader, CardTitle, CardContent } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select";
import { Badge } from "@/components/ui/badge";
import { Loader2, Play, RefreshCw, BrainCircuit, Activity, Database, LayoutDashboard, LineChart, FileText } from "lucide-react";
import { WorkflowVisualizer } from '@/components/workflow/WorkflowVisualizer';
import { StockChart } from './StockChart';
import { useWorkflow } from '@/hooks/useWorkflow';
import { AnalysisTemplateSets } from '@/types/index';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { ScrollArea } from '@/components/ui/scroll-area';
import { FundamentalDataView } from './FundamentalDataView';
import { AnalysisModulesView } from './AnalysisModulesView';
// Props for WorkflowReportLayout.
interface WorkflowReportLayoutProps {
  // Stock symbol to analyze (also shown in the header).
  symbol: string;
  // Optional market hint (e.g. passed through from the URL).
  initialMarket?: string;
  // Pre-selected analysis template id.
  initialTemplateId?: string;
  // When set, the component reconnects to an existing workflow run on mount.
  initialRequestId?: string;
}
/**
 * Workflow-driven report page: header with template picker + start button,
 * and tabs for market chart, fundamental data, analysis modules, and a
 * live workflow (DAG) monitor fed by useWorkflow().
 */
export function WorkflowReportLayout({
  symbol,
  initialMarket,
  initialTemplateId,
  initialRequestId
}: WorkflowReportLayoutProps) {
  const router = useRouter();
  const workflow = useWorkflow();
  const [templates, setTemplates] = useState<AnalysisTemplateSets>({});
  const [selectedTemplateId, setSelectedTemplateId] = useState(initialTemplateId || '');
  const [activeTab, setActiveTab] = useState("chart"); // Default to chart for quick overview

  // Auto-connect if initialRequestId is provided
  // NOTE(review): `workflow` is in the dependency array; the idle/!requestId
  // guard is what prevents duplicate connects if the hook returns a new
  // object each render — confirm useWorkflow's identity stability.
  useEffect(() => {
    if (initialRequestId && workflow.status === 'idle' && !workflow.requestId) {
      workflow.connectToWorkflow(initialRequestId);
    }
  }, [initialRequestId, workflow]);

  // Load templates
  // NOTE(review): depending on `selectedTemplateId` re-fetches the config
  // every time the user changes the dropdown; `[]` plus a functional
  // setSelectedTemplateId(prev => prev || defaultId) would fetch once.
  useEffect(() => {
    fetch('/api/configs/analysis_template_sets')
      .then(res => res.json())
      .then(data => {
        setTemplates(data);
        if (!selectedTemplateId && Object.keys(data).length > 0) {
          // Default to 'standard_analysis' if exists, else first key
          setSelectedTemplateId(data['standard_analysis'] ? 'standard_analysis' : Object.keys(data)[0]);
        }
      })
      .catch(err => console.error('Failed to load templates:', err));
  }, [selectedTemplateId]);

  // Auto switch to analysis tab when workflow starts
  useEffect(() => {
    if (workflow.status === 'connecting' || workflow.status === 'connected') {
      setActiveTab("analysis");
    }
  }, [workflow.status]);

  // Kick off a new workflow run; if the backend normalizes the symbol,
  // replace the URL so the address bar matches the canonical symbol.
  const handleStart = async () => {
    if (!selectedTemplateId) return;
    const response = await workflow.startWorkflow({
      symbol,
      market: initialMarket,
      template_id: selectedTemplateId
    });
    // Handle Symbol Normalization Redirection
    if (response && response.symbol && response.symbol !== symbol) {
      console.log(`Redirecting normalized symbol: ${symbol} -> ${response.symbol}`);
      const newUrl = `/report/${encodeURIComponent(response.symbol)}?template_id=${selectedTemplateId}&market=${response.market}`;
      router.replace(newUrl);
    }
  };

  const isRunning = workflow.status === 'connecting' || workflow.status === 'connected';
  // Get current template config for dynamic tabs
  const currentTemplate = templates[selectedTemplateId];
  const dynamicModules = currentTemplate?.modules || {};

  return (
    <div className="container mx-auto p-4 space-y-4">
      {/* Header Card */}
      <Card>
        <CardHeader className="flex flex-row items-center justify-between py-4">
          <div className="flex flex-col gap-1">
            <CardTitle className="text-2xl font-bold flex items-center gap-3">
              {symbol}
              <Badge variant="outline" className="font-normal text-sm">
                {initialMarket || 'Unknown Market'}
              </Badge>
            </CardTitle>
            <div className="text-sm text-muted-foreground flex items-center gap-2">
              <StatusBadge status={workflow.status} error={workflow.error} />
              {workflow.requestId && (
                <span className="text-xs font-mono text-muted-foreground/50">
                  ID: {workflow.requestId}
                </span>
              )}
            </div>
          </div>
          <div className="flex items-center gap-4">
            <div className="w-[200px]">
              <Select
                value={selectedTemplateId}
                onValueChange={setSelectedTemplateId}
                disabled={isRunning}
              >
                <SelectTrigger>
                  <SelectValue placeholder="Select Template" />
                </SelectTrigger>
                <SelectContent>
                  {Object.entries(templates).map(([id, tpl]) => (
                    <SelectItem key={id} value={id}>{tpl.name}</SelectItem>
                  ))}
                </SelectContent>
              </Select>
            </div>
            <Button
              onClick={handleStart}
              disabled={!selectedTemplateId || isRunning}
              className="min-w-[100px]"
            >
              {isRunning ? (
                <Loader2 className="mr-2 h-4 w-4 animate-spin" />
              ) : (
                <Play className="mr-2 h-4 w-4" />
              )}
              {isRunning ? 'Running...' : 'Start'}
            </Button>
          </div>
        </CardHeader>
      </Card>
      {/* Main Content Tabs */}
      <Tabs value={activeTab} onValueChange={setActiveTab} className="w-full">
        <TabsList className="grid w-full grid-cols-4">
          <TabsTrigger value="chart" className="gap-2">
            <LineChart className="w-4 h-4" />
            Market Chart
          </TabsTrigger>
          <TabsTrigger value="data" className="gap-2">
            <Database className="w-4 h-4" />
            Fundamental Data
          </TabsTrigger>
          <TabsTrigger value="analysis" className="gap-2">
            <LayoutDashboard className="w-4 h-4" />
            Analysis Modules
          </TabsTrigger>
          <TabsTrigger value="monitor" className="gap-2">
            <Activity className="w-4 h-4" />
            Workflow Monitor
          </TabsTrigger>
        </TabsList>
        {/* Tab A: Market Chart */}
        <TabsContent value="chart" className="mt-4">
          <StockChart symbol={symbol} />
        </TabsContent>
        {/* Tab B: Fundamental Data */}
        <TabsContent value="data" className="mt-4">
          <FundamentalDataView
            taskStates={workflow.taskStates}
            taskOutputs={workflow.taskOutputs}
          />
        </TabsContent>
        {/* Tab C: Analysis Modules */}
        <TabsContent value="analysis" className="mt-4">
          {workflow.requestId ? (
            <AnalysisModulesView
              taskStates={workflow.taskStates}
              taskOutputs={workflow.taskOutputs}
              modulesConfig={dynamicModules}
            />
          ) : (
            <EmptyState onStart={handleStart} message="Start workflow to generate analysis" />
          )}
        </TabsContent>
        {/* Tab D: Workflow Monitor */}
        <TabsContent value="monitor" className="mt-4 space-y-4">
          {workflow.dag ? (
            <div className="grid grid-cols-3 gap-4">
              <div className="col-span-2">
                <WorkflowVisualizer
                  dag={workflow.dag}
                  taskStates={workflow.taskStates}
                  taskOutputs={workflow.taskOutputs}
                />
              </div>
              <div className="col-span-1">
                <Card className="h-full">
                  <CardHeader>
                    <CardTitle className="text-lg">Execution Stats</CardTitle>
                  </CardHeader>
                  <CardContent>
                    <div className="space-y-4 text-sm">
                      <div className="flex justify-between py-2 border-b">
                        <span className="text-muted-foreground">Status</span>
                        <span className="font-medium">{workflow.status}</span>
                      </div>
                      <div className="flex justify-between py-2 border-b">
                        <span className="text-muted-foreground">Tasks Total</span>
                        <span className="font-medium">{workflow.dag.nodes.length}</span>
                      </div>
                      <div className="flex justify-between py-2 border-b">
                        <span className="text-muted-foreground">Tasks Completed</span>
                        <span className="font-medium text-green-600">
                          {Object.values(workflow.taskStates).filter(s => s === 'completed').length}
                        </span>
                      </div>
                      {workflow.finalResult && (
                        <div className="pt-4">
                          <Button variant="outline" className="w-full gap-2" onClick={() => setActiveTab("analysis")}>
                            <FileText className="w-4 h-4" />
                            View Final Report
                          </Button>
                        </div>
                      )}
                    </div>
                  </CardContent>
                </Card>
              </div>
            </div>
          ) : (
            <EmptyState onStart={handleStart} message="Ready to visualize workflow execution" />
          )}
        </TabsContent>
      </Tabs>
    </div>
  );
}
/** Connection/workflow status badge; an error message takes priority over status. */
function StatusBadge({ status, error }: { status: string, error: string | null }) {
  if (error) {
    return <Badge variant="destructive">Error: {error}</Badge>;
  }
  if (status === 'connecting' || status === 'connected') {
    return <Badge variant="secondary" className="animate-pulse">Processing</Badge>;
  }
  if (status === 'disconnected') {
    // A closed stream normally means the workflow ran to completion.
    return <Badge variant="outline">Finished</Badge>;
  }
  if (status === 'idle') {
    return <Badge variant="outline">Ready</Badge>;
  }
  return <Badge variant="outline">{status}</Badge>;
}
/** Placeholder panel with a call-to-action button for starting the workflow. */
function EmptyState({ onStart, message }: { onStart: () => void, message: string }) {
  const containerClasses =
    "flex flex-col items-center justify-center h-[400px] border rounded-lg border-dashed bg-muted/5 text-muted-foreground";
  return (
    <div className={containerClasses}>
      <BrainCircuit className="w-12 h-12 mb-4 opacity-20" />
      <p className="mb-4">{message}</p>
      <Button variant="outline" onClick={onStart}>
        Run Analysis
      </Button>
    </div>
  );
}
// Removed FinalReportView as it is now superseded by AnalysisModulesView

View File

@ -1,446 +0,0 @@
import { useState, useRef, useEffect, useMemo } from 'react';
import { useDataRequest, useTaskProgress } from '@/hooks/useApi';
// Per-module streaming analysis state.
interface AnalysisState {
  // Accumulated streamed content so far.
  content: string;
  // True while the stream for this module is still being consumed.
  loading: boolean;
  // Error message on failure, otherwise null.
  error: string | null;
  // Wall-clock duration of the request, in milliseconds.
  elapsed_ms?: number;
}
// Execution-log entry for one analysis module run.
interface AnalysisRecord {
  // Analysis module id (key into analysis_modules config).
  type: string;
  // Human-readable module name.
  name: string;
  // Lifecycle of this run.
  status: 'pending' | 'running' | 'done' | 'error';
  // ISO timestamps for start/end of the run.
  start_ts?: string;
  end_ts?: string;
  // Duration in milliseconds once finished.
  duration_ms?: number;
  // Token usage as reported by the backend, when available.
  tokens?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
  // Error message when status === 'error'.
  error?: string;
}
export function useAnalysisRunner(
financials: any,
financialConfig: any,
normalizedMarket: string,
unifiedSymbol: string,
isLoading: boolean,
error: any,
templateSets: any // Added templateSets
) {
// --- Template Logic ---
const [selectedTemplateId, setSelectedTemplateId] = useState<string>('');
// Set default template
useEffect(() => {
if (!selectedTemplateId && templateSets && Object.keys(templateSets).length > 0) {
const defaultId = Object.keys(templateSets).find(k => k.includes('standard') || k === 'default') || Object.keys(templateSets)[0];
setSelectedTemplateId(defaultId);
}
}, [templateSets, selectedTemplateId]);
const reportTemplateId = financials?.meta?.template_id;
// Determine active template set
const activeTemplateId = (financials && reportTemplateId) ? reportTemplateId : selectedTemplateId;
const activeTemplateSet = useMemo(() => {
if (!activeTemplateId || !templateSets) return null;
return templateSets[activeTemplateId] || null;
}, [activeTemplateId, templateSets]);
// Derive effective analysis config from template set, falling back to global config if needed
const activeAnalysisConfig = useMemo(() => {
if (activeTemplateSet) {
return {
...financialConfig,
analysis_modules: activeTemplateSet.modules,
};
}
return financialConfig; // Fallback to global config (legacy behavior)
}, [activeTemplateSet, financialConfig]);
// 分析类型列表
const analysisTypes = useMemo(() => {
if (!activeAnalysisConfig?.analysis_modules) return [];
return Object.keys(activeAnalysisConfig.analysis_modules);
}, [activeAnalysisConfig]);
// 分析状态管理
const [analysisStates, setAnalysisStates] = useState<Record<string, AnalysisState>>({});
const fullAnalysisTriggeredRef = useRef<boolean>(false);
const isAnalysisRunningRef = useRef<boolean>(false);
const analysisFetchedRefs = useRef<Record<string, boolean>>({});
const stopRequestedRef = useRef<boolean>(false);
const abortControllerRef = useRef<AbortController | null>(null);
const currentAnalysisTypeRef = useRef<string | null>(null);
const [manualRunKey, setManualRunKey] = useState(0);
// 当前正在执行的分析任务
const [currentAnalysisTask, setCurrentAnalysisTask] = useState<string | null>(null);
// 计时器状态
const [startTime, setStartTime] = useState<number | null>(null);
const [elapsedSeconds, setElapsedSeconds] = useState(0);
// 分析执行记录
const [analysisRecords, setAnalysisRecords] = useState<AnalysisRecord[]>([]);
// 新架构:触发分析与查看任务进度
const { trigger: triggerAnalysisRequest, isMutating: triggering } = useDataRequest();
const [requestId, setRequestId] = useState<string | null>(null);
const { progress: taskProgress } = useTaskProgress(requestId);
// 计算完成比例
const completionProgress = useMemo(() => {
const totalTasks = analysisRecords.length;
if (totalTasks === 0) return 0;
const completedTasks = analysisRecords.filter(r => r.status === 'done' || r.status === 'error').length;
return (completedTasks / totalTasks) * 100;
}, [analysisRecords]);
// 总耗时ms
const totalElapsedMs = useMemo(() => {
const finMs = financials?.meta?.elapsed_ms || 0;
const analysesMs = analysisRecords.reduce((sum, r) => sum + (r.duration_ms || 0), 0);
return finMs + analysesMs;
}, [financials?.meta?.elapsed_ms, analysisRecords]);
const hasRunningTask = useMemo(() => {
if (currentAnalysisTask !== null) return true;
if (analysisRecords.some(r => r.status === 'running')) return true;
return false;
}, [currentAnalysisTask, analysisRecords]);
// 全部任务是否完成
const allTasksCompleted = useMemo(() => {
if (analysisRecords.length === 0) return false;
const allDoneOrErrored = analysisRecords.every(r => r.status === 'done' || r.status === 'error');
return allDoneOrErrored && !hasRunningTask && currentAnalysisTask === null;
}, [analysisRecords, hasRunningTask, currentAnalysisTask]);
// 所有任务完成时,停止计时器
useEffect(() => {
if (allTasksCompleted) {
setStartTime(null);
}
}, [allTasksCompleted]);
useEffect(() => {
if (!startTime) return;
const interval = setInterval(() => {
const now = Date.now();
const elapsed = Math.floor((now - startTime) / 1000);
setElapsedSeconds(elapsed);
}, 1000);
return () => clearInterval(interval);
}, [startTime]);
// Re-runs a single analysis module from scratch: clears its previous state,
// streams the new result chunk-by-chunk into analysisStates, and records
// start/end/duration (or the error) in analysisRecords.
const retryAnalysis = async (analysisType: string) => {
  // Bail out if the report data or module config isn't loaded yet.
  if (!financials || !activeAnalysisConfig?.analysis_modules) {
    return;
  }
  // Allow this module to be fetched again and reset its visible state.
  analysisFetchedRefs.current[analysisType] = false;
  setAnalysisStates(prev => ({
    ...prev,
    [analysisType]: { content: '', loading: true, error: null }
  }));
  // Drop the stale record so a fresh "running" entry can be appended below.
  setAnalysisRecords(prev => prev.filter(record => record.type !== analysisType));
  const analysisName =
    activeAnalysisConfig.analysis_modules[analysisType]?.name || analysisType;
  const startTimeISO = new Date().toISOString();
  setCurrentAnalysisTask(analysisType);
  setAnalysisRecords(prev => [...prev, {
    type: analysisType,
    name: analysisName,
    status: 'running',
    start_ts: startTimeISO
  }]);
  try {
    const startedMsLocal = Date.now();
    const response = await fetch(
      `/api/financials/${normalizedMarket}/${unifiedSymbol}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || unifiedSymbol)}`
    );
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    // Consume the streamed body, pushing each decoded chunk into state so
    // the UI renders partial content as it arrives.
    const reader = response.body?.getReader();
    const decoder = new TextDecoder();
    let aggregate = '';
    if (reader) {
      while (true) {
        const { value, done } = await reader.read();
        if (done) break;
        const chunk = decoder.decode(value, { stream: true });
        aggregate += chunk;
        const snapshot = aggregate;
        setAnalysisStates(prev => ({
          ...prev,
          [analysisType]: {
            ...prev[analysisType],
            content: snapshot,
            loading: true,
            error: null,
          }
        }));
      }
    }
    // Stream finished: mark the module done and record its duration.
    const endTime = new Date().toISOString();
    const elapsedMs = Date.now() - startedMsLocal;
    setAnalysisStates(prev => ({
      ...prev,
      [analysisType]: {
        ...prev[analysisType],
        content: aggregate,
        loading: false,
        error: null,
        elapsed_ms: elapsedMs,
      }
    }));
    setAnalysisRecords(prev => prev.map(record =>
      record.type === analysisType
        ? {
            ...record,
            status: 'done',
            end_ts: endTime,
            duration_ms: elapsedMs,
          }
        : record
    ));
  } catch (err) {
    // Surface the failure in both the module state and the execution log.
    const errorMessage = err instanceof Error ? err.message : '加载失败';
    const endTime = new Date().toISOString();
    setAnalysisStates(prev => ({
      ...prev,
      [analysisType]: {
        ...prev[analysisType],
        content: '',
        loading: false,
        error: errorMessage
      }
    }));
    setAnalysisRecords(prev => prev.map(record =>
      record.type === analysisType
        ? {
            ...record,
            status: 'error',
            end_ts: endTime,
            error: errorMessage
          }
        : record
    ));
  } finally {
    // Clear the "current task" marker and remember this module was fetched.
    setCurrentAnalysisTask(null);
    analysisFetchedRefs.current[analysisType] = true;
  }
};
// Auto-run all configured analysis modules one at a time. Guarded by
// isAnalysisRunningRef so overlapping effect invocations never start a
// second sequence; stopRequestedRef and the AbortController provide
// cooperative cancellation (see stopAll / continuePending).
useEffect(() => {
if (isLoading || error || !financials || !activeAnalysisConfig?.analysis_modules || analysisTypes.length === 0) {
return;
}
if (isAnalysisRunningRef.current) {
return;
}
const runAnalysesSequentially = async () => {
// Double-check inside the async body: the outer check may be stale by now.
if (isAnalysisRunningRef.current) {
return;
}
isAnalysisRunningRef.current = true;
try {
// Start the wall-clock timer on the first real run (not after a stop).
if (!stopRequestedRef.current && !startTime) {
setStartTime(Date.now());
}
for (let i = 0; i < analysisTypes.length; i++) {
const analysisType = analysisTypes[i];
if (stopRequestedRef.current) {
break;
}
// Skip modules that already completed (or errored) in a previous pass.
if (analysisFetchedRefs.current[analysisType]) {
continue;
}
if (!analysisFetchedRefs.current || !activeAnalysisConfig?.analysis_modules) {
console.error("分析配置或refs未初始化无法进行分析。");
continue;
}
currentAnalysisTypeRef.current = analysisType;
const analysisName =
activeAnalysisConfig.analysis_modules[analysisType]?.name || analysisType;
const startTimeISO = new Date().toISOString();
setCurrentAnalysisTask(analysisType);
// Upsert the record for this type: update in place if present, else append.
setAnalysisRecords(prev => {
const next = [...prev];
const idx = next.findIndex(r => r.type === analysisType);
const updated: AnalysisRecord = {
type: analysisType,
name: analysisName,
status: 'running' as const,
start_ts: startTimeISO
};
if (idx >= 0) {
next[idx] = { ...next[idx], ...updated };
} else {
next.push(updated);
}
return next;
});
setAnalysisStates(prev => ({
...prev,
[analysisType]: { content: '', loading: true, error: null }
}));
try {
// One controller per request; aborting the previous one is defensive
// (sequential execution should have finished it already).
abortControllerRef.current?.abort();
abortControllerRef.current = new AbortController();
const startedMsLocal = Date.now();
const response = await fetch(
`/api/financials/${normalizedMarket}/${unifiedSymbol}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || unifiedSymbol)}`,
{ signal: abortControllerRef.current.signal }
);
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const reader = response.body?.getReader();
const decoder = new TextDecoder();
let aggregate = '';
if (reader) {
// Keep reading and appending streamed chunks into the content so the
// UI renders partial output as it arrives.
while (true) {
const { value, done } = await reader.read();
if (done) break;
const chunk = decoder.decode(value, { stream: true });
aggregate += chunk;
const snapshot = aggregate;
setAnalysisStates(prev => ({
...prev,
[analysisType]: {
...prev[analysisType],
content: snapshot,
loading: true,
error: null,
}
}));
}
}
const endTime = new Date().toISOString();
const elapsedMs = Date.now() - startedMsLocal;
// Success: finalize content and record the duration.
setAnalysisStates(prev => ({
...prev,
[analysisType]: {
...prev[analysisType],
content: aggregate,
loading: false,
error: null,
elapsed_ms: elapsedMs,
}
}));
setAnalysisRecords(prev => prev.map(record =>
record.type === analysisType
? {
...record,
status: 'done',
end_ts: endTime,
duration_ms: elapsedMs,
}
: record
));
} catch (err) {
// An abort (user pressed stop) is not an error: reset the task back to
// 'pending' so continuePending can pick it up, then exit the loop.
if (err && typeof err === 'object' && (err as any).name === 'AbortError') {
setAnalysisStates(prev => ({
...prev,
[analysisType]: { content: '', loading: false, error: null }
}));
setAnalysisRecords(prev => prev.map(record =>
record.type === analysisType
? { ...record, status: 'pending', start_ts: undefined }
: record
));
analysisFetchedRefs.current[analysisType] = false;
break;
}
const errorMessage = err instanceof Error ? err.message : '加载失败';
const endTime = new Date().toISOString();
setAnalysisStates(prev => ({
...prev,
[analysisType]: {
content: '',
loading: false,
error: errorMessage
}
}));
setAnalysisRecords(prev => prev.map(record =>
record.type === analysisType
? {
...record,
status: 'error',
end_ts: endTime,
error: errorMessage
}
: record
));
} finally {
// NOTE: this runs after the AbortError `break` too, so an aborted type is
// marked fetched here even though the catch block reset its fetched flag
// just before — the break happens before this finally's assignment takes
// lasting effect only for non-aborted types; aborted types were already
// flagged false above and are re-flagged true here by design of the
// original flow (continuePending re-opens them via status 'pending').
setCurrentAnalysisTask(null);
currentAnalysisTypeRef.current = null;
analysisFetchedRefs.current[analysisType] = true;
}
}
} finally {
isAnalysisRunningRef.current = false;
}
};
runAnalysesSequentially();
}, [isLoading, error, financials, activeAnalysisConfig, analysisTypes, normalizedMarket, unifiedSymbol, startTime, manualRunKey]);
// Stop everything: flag the runner to halt, abort the in-flight stream,
// un-mark the interrupted task so it can be resumed, and clear the timer.
const stopAll = () => {
stopRequestedRef.current = true;
const controller = abortControllerRef.current;
abortControllerRef.current = null;
controller?.abort();
isAnalysisRunningRef.current = false;
const interruptedType = currentAnalysisTypeRef.current;
if (interruptedType) {
analysisFetchedRefs.current[interruptedType] = false;
}
setCurrentAnalysisTask(null);
setStartTime(null);
};
// Resume pending analyses after a stop: clear the stop flag, restore the
// timer so elapsed seconds continue from where they left off, and bump the
// manual-run key to re-trigger the sequential-runner effect.
const continuePending = () => {
if (isAnalysisRunningRef.current) {
return;
}
stopRequestedRef.current = false;
setStartTime((prev) => {
if (prev != null) return prev;
return Date.now() - elapsedSeconds * 1000;
});
setManualRunKey((key) => key + 1);
};
const triggerAnalysis = async () => {
const reqId = await triggerAnalysisRequest(unifiedSymbol, normalizedMarket || '', selectedTemplateId);
if (reqId) setRequestId(reqId);
};
// Public API of the analysis-runner hook: config, per-type state/records,
// progress metrics, and control actions (start / stop / continue / retry).
return {
activeAnalysisConfig, // Exported
analysisTypes,
analysisStates,
analysisRecords,
currentAnalysisTask,
triggerAnalysis,
triggering,
requestId,
setRequestId,
taskProgress,
startTime,
elapsedSeconds,
completionProgress,
totalElapsedMs,
stopAll,
continuePending,
retryAnalysis,
hasRunningTask,
isAnalysisRunning: isAnalysisRunningRef.current,
selectedTemplateId, // Exported
setSelectedTemplateId, // Exported
};
}

View File

@ -1,143 +1,30 @@
'use client'; 'use client';
import { Tabs, TabsList, TabsTrigger, TabsContent } from '@/components/ui/tabs'; import React from 'react';
import { useReportData } from './hooks/useReportData'; import { useParams, useSearchParams } from 'next/navigation';
import { useAnalysisRunner } from './hooks/useAnalysisRunner'; import { WorkflowReportLayout } from './components/WorkflowReportLayout';
import { ReportHeader } from './components/ReportHeader';
import { TaskStatus } from './components/TaskStatus';
import { StockChart } from './components/StockChart';
import { FinancialTable } from './components/FinancialTable';
import { AnalysisContent } from './components/AnalysisContent';
import { ExecutionDetails } from './components/ExecutionDetails';
export default function ReportPage() { export default function ReportPage() {
const { // Next.js 15 params handling
unifiedSymbol, // Note: In client components, hooks like useParams() handle the async nature internally or return current values
displayMarket, const params = useParams();
normalizedMarket, const searchParams = useSearchParams();
marketParam,
financials,
isLoading,
error,
snapshot,
snapshotLoading,
realtime,
realtimeLoading,
realtimeError,
financialConfig,
templateSets,
} = useReportData();
const { const symbol = typeof params.symbol === 'string' ? decodeURIComponent(params.symbol) : '';
activeAnalysisConfig, const initialTemplateId = searchParams.get('template_id') || undefined;
analysisTypes, const initialMarket = searchParams.get('market') || undefined;
analysisStates, const initialRequestId = searchParams.get('request_id') || undefined;
analysisRecords,
currentAnalysisTask, if (!symbol) {
triggerAnalysis, return <div className="p-8 text-center text-red-500">Invalid Symbol</div>;
triggering, }
requestId,
taskProgress,
startTime,
elapsedSeconds,
completionProgress,
totalElapsedMs,
stopAll,
continuePending,
retryAnalysis,
hasRunningTask,
isAnalysisRunning,
selectedTemplateId,
setSelectedTemplateId,
} = useAnalysisRunner(financials, financialConfig, normalizedMarket, unifiedSymbol, isLoading, error, templateSets);
return ( return (
<div className="space-y-4"> <WorkflowReportLayout
<div className="flex items-stretch justify-between gap-4"> symbol={symbol}
<ReportHeader initialMarket={initialMarket}
unifiedSymbol={unifiedSymbol} initialTemplateId={initialTemplateId}
displayMarket={displayMarket} initialRequestId={initialRequestId}
isLoading={isLoading}
financials={financials}
snapshot={snapshot}
snapshotLoading={snapshotLoading}
triggering={triggering}
hasRunningTask={hasRunningTask}
isAnalysisRunning={isAnalysisRunning}
onStartAnalysis={triggerAnalysis}
onStopAnalysis={stopAll}
onContinueAnalysis={continuePending}
templateSets={templateSets}
selectedTemplateId={selectedTemplateId}
onSelectTemplate={setSelectedTemplateId}
/> />
<TaskStatus
requestId={requestId}
taskProgress={taskProgress}
startTime={startTime}
elapsedSeconds={elapsedSeconds}
completionProgress={completionProgress}
currentAnalysisTask={currentAnalysisTask}
analysisConfig={activeAnalysisConfig}
/>
</div>
<Tabs defaultValue="chart" className="mt-4">
<TabsList className="flex-wrap">
<TabsTrigger value="chart"></TabsTrigger>
<TabsTrigger value="financial"></TabsTrigger>
{analysisTypes.map(type => (
<TabsTrigger key={type} value={type}>
{type === 'company_profile' ? '公司简介' : (activeAnalysisConfig?.analysis_modules?.[type]?.name || type)}
</TabsTrigger>
))}
<TabsTrigger value="execution"></TabsTrigger>
</TabsList>
<TabsContent value="chart">
<StockChart
unifiedSymbol={unifiedSymbol}
marketParam={marketParam}
realtime={realtime}
realtimeLoading={realtimeLoading}
realtimeError={realtimeError}
/>
</TabsContent>
<TabsContent value="financial">
<FinancialTable
financials={financials}
isLoading={isLoading}
error={error}
financialConfig={activeAnalysisConfig}
/>
</TabsContent>
{analysisTypes.map(analysisType => (
<TabsContent key={analysisType} value={analysisType}>
<AnalysisContent
analysisType={analysisType}
state={analysisStates[analysisType] || { content: '', loading: false, error: null }}
financials={financials}
analysisConfig={activeAnalysisConfig}
retryAnalysis={retryAnalysis}
currentAnalysisTask={currentAnalysisTask}
/>
</TabsContent>
))}
<TabsContent value="execution">
<ExecutionDetails
financials={financials}
isLoading={isLoading}
error={error}
analysisRecords={analysisRecords}
currentAnalysisTask={currentAnalysisTask}
totalElapsedMs={totalElapsedMs}
retryAnalysis={retryAnalysis}
/>
</TabsContent>
</Tabs>
</div>
); );
} }

View File

@ -115,7 +115,11 @@ export function TradingViewWidget({
// 延迟到下一帧,确保容器已插入并可获取 iframe.contentWindow // 延迟到下一帧,确保容器已插入并可获取 iframe.contentWindow
requestAnimationFrame(() => { requestAnimationFrame(() => {
try { try {
if (container.isConnected) { // 再次检查容器是否仍然连接在DOM上避免组件卸载后执行
if (container && container.isConnected) {
// TradingView 的 embed 脚本会在内部创建 iframe
// 如果容器正在被卸载,或者 iframe 尚未完全准备好,可能会触发该错误
// 我们只是 append script实际的 iframe 是由 TradingView 脚本注入的
container.appendChild(script); container.appendChild(script);
} }
} catch (e) { } catch (e) {
@ -126,11 +130,9 @@ export function TradingViewWidget({
} }
return () => { return () => {
const c = containerRef.current; // 清理函数
if (c) { if (containerRef.current) {
try { containerRef.current.innerHTML = '';
c.innerHTML = '';
} catch {}
} }
}; };
}, [symbol, market]); }, [symbol, market]);

View File

@ -77,7 +77,7 @@ export interface StatusBarState {
retryable?: boolean; retryable?: boolean;
} }
// 预定义的执行步骤已移至 ExecutionStepManager // 预定义的执行步骤已移至 ExecutionStepManager (Deleted)
// ============================================================================ // ============================================================================
// 主组件 // 主组件

View File

@ -0,0 +1,69 @@
import React from 'react';
import ReactMarkdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import { ScrollArea } from '@/components/ui/scroll-area';
import { Card, CardContent, CardHeader, CardTitle } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { TaskStatus, TaskType } from '@/types/workflow';
import { Loader2 } from 'lucide-react';
// Props for TaskOutputViewer: identity and metadata of one workflow task
// plus its accumulated (possibly still-streaming) output.
interface TaskOutputViewerProps {
// Unique task identifier (rendered in monospace under the title).
taskId: string;
// Human-readable task name.
taskName: string;
// Task category; drives the metadata line in the header.
taskType: TaskType;
// Current lifecycle status; drives the badge and the empty-state text.
status: TaskStatus;
// Accumulated markdown output; empty string shows a status placeholder.
content: string;
}
// Renders one task's header (name, id, type, status badge) and its markdown
// output; shows a status-appropriate placeholder while there is no content.
export function TaskOutputViewer({
taskId,
taskName,
taskType,
status,
content
}: TaskOutputViewerProps) {
// Badge variant per status. shadcn has no stock 'success' variant, so
// Completed falls back to the default (primary) style.
const badgeVariant =
status === 'Completed' ? 'default' :
status === 'Failed' ? 'destructive' :
status === 'Running' ? 'secondary' :
'outline';
// Empty-state message when no output has arrived yet.
const emptyMessage =
status === 'Pending' ? 'Waiting to start...' :
status === 'Running' ? 'Processing...' :
'No output available';
return (
<Card className="h-full flex flex-col border-l-0 rounded-l-none shadow-none">
<CardHeader className="flex flex-row items-center justify-between py-4">
<div className="flex flex-col gap-1">
<CardTitle className="text-lg font-medium">{taskName}</CardTitle>
<div className="flex items-center gap-2 text-xs text-muted-foreground">
<span className="font-mono">{taskId}</span>
<span></span>
<span>{taskType}</span>
</div>
</div>
<div className="flex items-center gap-2">
<Badge variant={badgeVariant}>
{status === 'Running' && <Loader2 className="w-3 h-3 mr-1 animate-spin" />}
{status}
</Badge>
</div>
</CardHeader>
<CardContent className="flex-1 p-0 min-h-0">
<ScrollArea className="h-full w-full p-4">
{content ? (
<div className="prose dark:prose-invert max-w-none text-sm">
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{content}
</ReactMarkdown>
</div>
) : (
<div className="flex h-full items-center justify-center text-muted-foreground text-sm italic">
{emptyMessage}
</div>
)}
</ScrollArea>
</CardContent>
</Card>
);
}

View File

@ -0,0 +1,159 @@
import React, { useState, useEffect } from 'react';
import { WorkflowDag, TaskNode, TaskStatus, TaskType } from '@/types/workflow';
import { TaskOutputViewer } from './TaskOutputViewer';
import { Card } from '@/components/ui/card';
import { Badge } from '@/components/ui/badge';
import { ScrollArea } from '@/components/ui/scroll-area';
import { cn } from '@/lib/utils';
import {
CheckCircle2,
Circle,
Clock,
AlertCircle,
Loader2,
SkipForward,
Database,
FileText,
BrainCircuit
} from 'lucide-react';
// Props for WorkflowVisualizer: the static DAG plus live per-task state.
interface WorkflowVisualizerProps {
// Workflow graph definition (nodes + edges) received from the backend.
dag: WorkflowDag;
// Live status per task id; falls back to each node's initial_status.
taskStates: Record<string, TaskStatus>;
// Accumulated streaming output per task id.
taskOutputs: Record<string, string>;
// Optional extra classes for the outer container.
className?: string;
}
// Display order of task types in the sidebar: fetch, then processing, then analysis.
const TYPE_ORDER: Record<TaskType, number> = {
'DataFetch': 1,
'DataProcessing': 2,
'Analysis': 3
};
// Small icon shown next to each task's type label in the list.
const TYPE_ICONS: Record<TaskType, React.ReactNode> = {
'DataFetch': <Database className="w-4 h-4" />,
'DataProcessing': <FileText className="w-4 h-4" />,
'Analysis': <BrainCircuit className="w-4 h-4" />
};
// Two-pane workflow viewer: task list on the left, selected task's output on
// the right. Auto-selects the running task (or the first one) when nothing is
// selected yet.
export function WorkflowVisualizer({
dag,
taskStates,
taskOutputs,
className
}: WorkflowVisualizerProps) {
const [selectedTaskId, setSelectedTaskId] = useState<string | null>(null);
// Sort nodes by type (via TYPE_ORDER) then by name.
// Fix: memoized so the array identity is stable across renders. Previously a
// fresh array was created on every render and listed in the effect deps
// below, which made that effect fire on every render.
const sortedNodes = React.useMemo(() => {
return [...dag.nodes].sort((a, b) => {
const typeScoreA = TYPE_ORDER[a.type] || 99;
const typeScoreB = TYPE_ORDER[b.type] || 99;
if (typeScoreA !== typeScoreB) return typeScoreA - typeScoreB;
return a.name.localeCompare(b.name);
});
}, [dag.nodes]);
// Auto-select a node when nothing is selected: prefer the running one,
// otherwise take the first in sorted order.
useEffect(() => {
if (!selectedTaskId && sortedNodes.length > 0) {
const runningNode = sortedNodes.find(n => taskStates[n.id] === 'Running');
setSelectedTaskId(runningNode ? runningNode.id : sortedNodes[0].id);
}
}, [dag, taskStates, selectedTaskId, sortedNodes]);
const selectedNode = dag.nodes.find(n => n.id === selectedTaskId);
return (
<div className={cn("flex h-[600px] border rounded-lg overflow-hidden bg-background", className)}>
{/* Left Sidebar: Task List */}
<div className="w-1/3 min-w-[250px] border-r bg-muted/10 flex flex-col">
<div className="p-4 border-b bg-muted/20">
<h3 className="font-semibold text-sm text-foreground">Workflow Tasks</h3>
<p className="text-xs text-muted-foreground mt-1">
{dag.nodes.length} steps in pipeline
</p>
</div>
<ScrollArea className="flex-1">
<div className="p-2 space-y-1">
{sortedNodes.map(node => (
<TaskListItem
key={node.id}
node={node}
status={taskStates[node.id] || node.initial_status}
isSelected={selectedTaskId === node.id}
onClick={() => setSelectedTaskId(node.id)}
/>
))}
</div>
</ScrollArea>
</div>
{/* Right Content: Output Viewer */}
<div className="flex-1 min-w-0 bg-card">
{selectedNode ? (
<TaskOutputViewer
taskId={selectedNode.id}
taskName={selectedNode.name}
taskType={selectedNode.type}
status={taskStates[selectedNode.id] || selectedNode.initial_status}
content={taskOutputs[selectedNode.id] || ''}
/>
) : (
<div className="h-full flex items-center justify-center text-muted-foreground">
Select a task to view details
</div>
)}
</div>
</div>
);
}
// One row in the task list: status icon, task name, and type label.
// Highlights when selected and triggers onClick to select it.
function TaskListItem({
node,
status,
isSelected,
onClick
}: {
node: TaskNode;
status: TaskStatus;
isSelected: boolean;
onClick: () => void;
}) {
const selectionClasses = isSelected
? "bg-accent text-accent-foreground shadow-sm"
: "text-muted-foreground";
const buttonClasses = cn(
"w-full flex items-center gap-3 p-3 text-sm text-left rounded-md transition-colors",
"hover:bg-accent hover:text-accent-foreground",
selectionClasses
);
return (
<button onClick={onClick} className={buttonClasses}>
<StatusIcon status={status} />
<div className="flex-1 min-w-0">
<div className="font-medium truncate">{node.name}</div>
<div className="flex items-center gap-1 text-xs opacity-70 mt-0.5">
{TYPE_ICONS[node.type]}
<span>{node.type}</span>
</div>
</div>
</button>
);
}
// Status glyph for a task row; Pending (and any unknown status) falls back
// to the empty grey circle.
function StatusIcon({ status }: { status: TaskStatus }) {
if (status === 'Completed') return <CheckCircle2 className="w-4 h-4 text-green-500" />;
if (status === 'Failed') return <AlertCircle className="w-4 h-4 text-red-500" />;
if (status === 'Running') return <Loader2 className="w-4 h-4 text-blue-500 animate-spin" />;
if (status === 'Scheduled') return <Clock className="w-4 h-4 text-yellow-500" />;
if (status === 'Skipped') return <SkipForward className="w-4 h-4 text-gray-400" />;
return <Circle className="w-4 h-4 text-gray-300" />;
}

View File

@ -55,28 +55,26 @@ export function useDataRequest() {
} }
// 用于轮询任务进度
// [DEPRECATED] Used for polling, logic removed.
// Backend now pushes progress via SSE.
export function useTaskProgress(requestId: string | null, options?: SWRConfiguration) { export function useTaskProgress(requestId: string | null, options?: SWRConfiguration) {
const { data, error, isLoading } = useSWR( const { data, error, isLoading } = useSWR<TaskProgress[]>(
requestId ? `/api/tasks/${requestId}` : null, null, // Disable polling
fetcher, fetcher,
{ options
refreshInterval: 2000, // 每2秒轮询一次
...options,
errorRetryCount: 2,
}
); );
const isFinished = !isLoading && (data?.status?.includes('completed') || data?.status?.includes('failed') || !data);
return { return {
progress: data, tasks: [],
isLoading, progress: null,
isError: error, isLoading: false,
isFinished, isError: null,
isFinished: false,
}; };
} }
// --- Analysis Results Hooks (NEW) --- // --- Analysis Results Hooks (NEW) ---
export function useAnalysisResults(symbol?: string) { export function useAnalysisResults(symbol?: string) {
@ -257,11 +255,18 @@ export async function updateConfig(payload: Partial<SystemConfig>) {
return updated; return updated;
} }
export async function testConfig(type: string, data: unknown) { export async function testConfig(type: string, data: any) {
const res = await fetch('/api/config/test', { // Flat the data object to match backend expectation (#[serde(flatten)])
const payload = {
type,
...(typeof data === 'object' ? data : {})
};
// Use /api/configs/test to match backend /v1/configs/test via Rewrite
const res = await fetch('/api/configs/test', {
method: 'POST', method: 'POST',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ type, data }), body: JSON.stringify(payload),
}); });
const text = await res.text(); const text = await res.text();
if (!res.ok) { if (!res.ok) {

View File

@ -0,0 +1,206 @@
import { useState, useRef, useCallback, useEffect } from 'react';
import {
WorkflowEvent,
WorkflowDag,
TaskStatus,
StartWorkflowRequest,
StartWorkflowResponse
} from '@/types/workflow';
// Lifecycle of the SSE connection driving a workflow view.
export type WorkflowConnectionStatus = 'idle' | 'connecting' | 'connected' | 'disconnected' | 'error';
interface UseWorkflowReturn {
// State
status: WorkflowConnectionStatus;
// Id of the workflow being observed, once started/connected.
requestId: string | null;
// The task graph, once received from the server.
dag: WorkflowDag | null;
// Live status per task id.
taskStates: Record<string, TaskStatus>;
taskOutputs: Record<string, string>; // Accumulates streaming content
// Last reported error (start failure, fatal workflow failure, or lost connection).
error: string | null;
// Result summary delivered by the WorkflowCompleted event.
finalResult: any | null;
// Actions
// Returns StartWorkflowResponse to allow caller to handle redirects (e.g. symbol normalization)
startWorkflow: (params: StartWorkflowRequest) => Promise<StartWorkflowResponse | undefined>;
// Attach to an existing workflow's SSE stream by request id.
connectToWorkflow: (requestId: string) => void;
// Close the SSE stream and mark the connection as disconnected.
disconnect: () => void;
}
/**
 * React hook managing a workflow's lifecycle: start it with a POST, then
 * stream DAG / task-state / output events over SSE and fold them into state.
 *
 * Fix: each `switch` case body in the event handler is now wrapped in its own
 * block so the `const` declarations (`initialStates`, `outputs`) are scoped
 * per case instead of leaking across the whole switch (no-case-declarations).
 */
export function useWorkflow(): UseWorkflowReturn {
const [status, setStatus] = useState<WorkflowConnectionStatus>('idle');
const [requestId, setRequestId] = useState<string | null>(null);
const [dag, setDag] = useState<WorkflowDag | null>(null);
const [taskStates, setTaskStates] = useState<Record<string, TaskStatus>>({});
const [taskOutputs, setTaskOutputs] = useState<Record<string, string>>({});
const [error, setError] = useState<string | null>(null);
const [finalResult, setFinalResult] = useState<any | null>(null);
// EventSource handle lives in a ref so disconnect/cleanup can always reach it.
const eventSourceRef = useRef<EventSource | null>(null);
// Close the stream (if any) and mark the connection as disconnected.
const disconnect = useCallback(() => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
eventSourceRef.current = null;
}
setStatus('disconnected');
}, []);
// Fold a single server event into local state.
const handleEvent = useCallback((eventData: WorkflowEvent) => {
switch (eventData.type) {
case 'WorkflowStarted': {
setDag(eventData.payload.task_graph);
// Seed per-task status from the graph's declared initial states.
const initialStates: Record<string, TaskStatus> = {};
eventData.payload.task_graph.nodes.forEach(node => {
initialStates[node.id] = node.initial_status;
});
setTaskStates(initialStates);
break;
}
case 'TaskStateChanged': {
setTaskStates(prev => ({
...prev,
[eventData.payload.task_id]: eventData.payload.status
}));
break;
}
case 'TaskStreamUpdate': {
// Append the delta to the task's accumulated output.
setTaskOutputs(prev => ({
...prev,
[eventData.payload.task_id]: (prev[eventData.payload.task_id] || '') + eventData.payload.content_delta
}));
break;
}
case 'WorkflowStateSnapshot': {
// Restore full state (e.g. after reconnecting mid-run).
setDag(eventData.payload.task_graph);
setTaskStates(eventData.payload.tasks_status);
const outputs: Record<string, string> = {};
Object.entries(eventData.payload.tasks_output).forEach(([k, v]) => {
if (v) outputs[k] = v;
});
setTaskOutputs(prev => ({ ...prev, ...outputs }));
break;
}
case 'WorkflowCompleted': {
setFinalResult(eventData.payload.result_summary);
disconnect(); // Close connection on completion
break;
}
case 'WorkflowFailed': {
setError(eventData.payload.reason);
// Non-fatal failures keep the stream open (a retry may follow);
// fatal ones tear the connection down and surface an error status.
if (eventData.payload.is_fatal) {
disconnect();
setStatus('error');
}
break;
}
}
}, [disconnect]);
// Open (or reopen) the SSE stream for a given workflow request id.
const connectToWorkflow = useCallback((id: string) => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
}
setRequestId(id);
setStatus('connecting');
setError(null);
try {
const es = new EventSource(`/api/workflow/events/${id}`);
eventSourceRef.current = es;
es.onopen = () => {
setStatus('connected');
};
es.onmessage = (event) => {
try {
const data = JSON.parse(event.data) as WorkflowEvent;
handleEvent(data);
} catch (e) {
console.error('Failed to parse workflow event:', e);
}
};
es.onerror = (e) => {
console.error('Workflow SSE error:', e);
// EventSource retries transient failures on its own; treat a CLOSED
// readyState as a fatal, non-recoverable loss of the connection.
if (es.readyState === EventSource.CLOSED) {
setStatus('error');
setError('Connection lost');
es.close();
}
};
} catch (e) {
console.error('Failed to create EventSource:', e);
setStatus('error');
setError(e instanceof Error ? e.message : 'Connection initialization failed');
}
}, [handleEvent]);
// POST to start a workflow, reset all local state, then attach to its stream.
// Returns the server response so callers can handle redirects
// (e.g. symbol normalization); returns undefined on failure.
const startWorkflow = useCallback(async (params: StartWorkflowRequest) => {
setStatus('connecting');
setError(null);
setDag(null);
setTaskStates({});
setTaskOutputs({});
setFinalResult(null);
try {
const res = await fetch('/api/workflow/start', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(params),
});
if (!res.ok) {
const errorBody = await res.json().catch(() => ({}));
throw new Error(errorBody.error || `HTTP ${res.status}`);
}
const data: StartWorkflowResponse = await res.json();
// Start listening for this workflow's events.
connectToWorkflow(data.request_id);
return data;
} catch (e) {
setStatus('error');
setError(e instanceof Error ? e.message : 'Failed to start workflow');
return undefined;
}
}, [connectToWorkflow]);
// Close the stream when the consuming component unmounts.
useEffect(() => {
return () => {
if (eventSourceRef.current) {
eventSourceRef.current.close();
}
};
}, []);
return {
status,
requestId,
dag,
taskStates,
taskOutputs,
error,
finalResult,
startWorkflow,
connectToWorkflow,
disconnect
};
}

View File

@ -1,434 +0,0 @@
/**
* ExecutionStepManager -
*
*
*
*
* -
* -
* -
* -
* -
*
* @author Financial Analysis Platform Team
* @version 1.0.0
*/
// ============================================================================
// 类型定义
// ============================================================================
/**
 * One unit of work in an execution pipeline.
 */
export interface ExecutionStep {
/** Unique step identifier */
id: string;
/** Display name for the step */
name: string;
/** Longer human-readable description */
description: string;
/** Optional async work to perform; a step without it is a no-op marker */
execute?: () => Promise<void>;
}
/**
 * Callbacks and retry policy for ExecutionStepManager.
 */
export interface ExecutionOptions {
/** Called when a step begins */
onStepStart?: (step: ExecutionStep, index: number, total: number) => void;
/** Called when a step finishes successfully */
onStepComplete?: (step: ExecutionStep, index: number, total: number) => void;
/** Called when a step fails after exhausting its retries */
onStepError?: (step: ExecutionStep, index: number, total: number, error: Error) => void;
/** Called once the whole pipeline has run */
onComplete?: () => void;
/** Called when execution as a whole fails */
onError?: (error: Error) => void;
/** Maximum retry attempts per step (in addition to the initial attempt) */
maxRetries?: number;
/** Delay between retry attempts, in milliseconds */
retryDelay?: number;
/** Continue to the next step instead of throwing when a step fails */
continueOnError?: boolean;
}
/**
 * Snapshot of the manager's execution state, as returned by getContext().
 */
export interface ExecutionContext {
/** Step currently (or last) being executed, null when idle */
currentStep: ExecutionStep | null;
/** Index of the current step */
stepIndex: number;
/** Total number of steps in the pipeline */
totalSteps: number;
/** True while execute()/executeStep() is in progress */
isRunning: boolean;
/** True after a step has failed */
hasError: boolean;
/** Message of the last failure, if any */
errorMessage?: string;
/** Retry attempt number of the current step (0 = first attempt) */
retryCount: number;
/** Configured maximum retry attempts */
maxRetries: number;
/** True when retry() may be called */
canRetry: boolean;
}
export class ExecutionStepManager {
private steps: ExecutionStep[] = [];
private context: ExecutionContext = {
currentStep: null,
stepIndex: 0,
totalSteps: 0,
isRunning: false,
hasError: false,
errorMessage: undefined,
retryCount: 0,
maxRetries: 0,
canRetry: false
};
private options: ExecutionOptions = {};
constructor(steps: ExecutionStep[] = [], options: ExecutionOptions = {}) {
this.steps = [...steps];
this.options = {
maxRetries: 2,
retryDelay: 1000,
continueOnError: false,
...options
};
this.updateContext();
}
/**
*
*/
addStep(step: ExecutionStep): void {
this.steps.push(step);
this.updateContext();
}
/**
*
*/
addSteps(steps: ExecutionStep[]): void {
this.steps.push(...steps);
this.updateContext();
}
/**
*
*/
insertStep(index: number, step: ExecutionStep): void {
this.steps.splice(index, 0, step);
this.updateContext();
}
/**
*
*/
removeStep(stepId: string): boolean {
const index = this.steps.findIndex(step => step.id === stepId);
if (index !== -1) {
this.steps.splice(index, 1);
this.updateContext();
return true;
}
return false;
}
/**
*
*/
clearSteps(): void {
this.steps = [];
this.updateContext();
}
/**
*
*/
getSteps(): ExecutionStep[] {
return [...this.steps];
}
/**
*
*/
getContext(): ExecutionContext {
return { ...this.context };
}
/**
*
*/
setOptions(options: ExecutionOptions): void {
this.options = { ...this.options, ...options };
}
/**
*
*/
async execute(): Promise<void> {
if (this.context.isRunning) {
throw new Error('Execution is already in progress');
}
if (this.steps.length === 0) {
throw new Error('No steps to execute');
}
this.context.isRunning = true;
this.context.hasError = false;
this.context.errorMessage = undefined;
this.context.stepIndex = 0;
this.context.retryCount = 0;
this.context.maxRetries = this.options.maxRetries || 2;
try {
for (let i = 0; i < this.steps.length; i++) {
const step = this.steps[i];
this.context.currentStep = step;
this.context.stepIndex = i;
// 通知步骤开始
this.options.onStepStart?.(step, i, this.steps.length);
let stepSuccess = false;
let lastError: Error | null = null;
// 重试逻辑
for (let retryAttempt = 0; retryAttempt <= this.context.maxRetries; retryAttempt++) {
try {
this.context.retryCount = retryAttempt;
// 如果是重试,等待一段时间
if (retryAttempt > 0 && this.options.retryDelay) {
await new Promise(resolve => setTimeout(resolve, this.options.retryDelay));
}
// 执行步骤(如果有执行函数)
if (step.execute) {
await step.execute();
}
stepSuccess = true;
break; // 成功执行,跳出重试循环
} catch (stepError) {
lastError = stepError instanceof Error ? stepError : new Error(String(stepError));
// 如果还有重试机会,继续重试
if (retryAttempt < this.context.maxRetries) {
console.warn(`Step "${step.name}" failed, retrying (${retryAttempt + 1}/${this.context.maxRetries + 1}):`, lastError.message);
continue;
}
}
}
if (stepSuccess) {
// 通知步骤完成
this.options.onStepComplete?.(step, i, this.steps.length);
} else {
// 所有重试都失败了
const error = lastError || new Error('Step execution failed');
// 更新错误状态
this.context.hasError = true;
this.context.errorMessage = error.message;
this.context.canRetry = true;
// 通知步骤错误
this.options.onStepError?.(step, i, this.steps.length, error);
// 如果不继续执行,抛出错误
if (!this.options.continueOnError) {
throw error;
}
}
}
// 所有步骤执行完成
this.options.onComplete?.();
} catch (error) {
const execError = error instanceof Error ? error : new Error(String(error));
// 通知执行错误
this.options.onError?.(execError);
// 重新抛出错误
throw execError;
} finally {
this.context.isRunning = false;
}
}
/**
*
*/
async executeStep(stepId: string): Promise<void> {
const stepIndex = this.steps.findIndex(step => step.id === stepId);
if (stepIndex === -1) {
throw new Error(`Step with id '${stepId}' not found`);
}
const step = this.steps[stepIndex];
this.context.currentStep = step;
this.context.stepIndex = stepIndex;
this.context.isRunning = true;
this.context.hasError = false;
this.context.errorMessage = undefined;
try {
// 通知步骤开始
this.options.onStepStart?.(step, stepIndex, this.steps.length);
// 执行步骤
if (step.execute) {
await step.execute();
}
// 通知步骤完成
this.options.onStepComplete?.(step, stepIndex, this.steps.length);
} catch (stepError) {
const error = stepError instanceof Error ? stepError : new Error(String(stepError));
// 更新错误状态
this.context.hasError = true;
this.context.errorMessage = error.message;
// 通知步骤错误
this.options.onStepError?.(step, stepIndex, this.steps.length, error);
throw error;
} finally {
this.context.isRunning = false;
}
}
/**
*
*/
stop(): void {
this.context.isRunning = false;
}
/**
 * Retry execution after a failure.
 *
 * Preconditions: the previous run must have failed with a retryable
 * error (context.hasError && context.canRetry), and no run may be in
 * progress; otherwise an Error is thrown.
 *
 * Clears the error flags and re-runs execute(). Errors raised by
 * execute() propagate to the caller unchanged — execute() already does
 * its own error bookkeeping and callbacks, so the previous
 * catch-and-rethrow wrapper was a no-op and has been removed.
 *
 * @throws Error 'No failed step to retry' when there is nothing to retry
 * @throws Error 'Execution is already in progress' when a run is active
 */
async retry(): Promise<void> {
if (!this.context.hasError || !this.context.canRetry) {
throw new Error('No failed step to retry');
}
if (this.context.isRunning) {
throw new Error('Execution is already in progress');
}
// Reset error state before re-running.
this.context.hasError = false;
this.context.errorMessage = undefined;
this.context.canRetry = false;
await this.execute();
}
/**
 * Reset the execution context to its initial state: no current step,
 * index 0, not running, no error, retry counters cleared. maxRetries
 * is re-read from options, defaulting to 2.
 */
reset(): void {
this.context = {
currentStep: null,
stepIndex: 0,
totalSteps: this.steps.length,
isRunning: false,
hasError: false,
errorMessage: undefined,
retryCount: 0,
maxRetries: this.options.maxRetries || 2,
canRetry: false
};
}
/**
 * Whether an execution (full run or single step) is currently in progress.
 */
isRunning(): boolean {
return this.context.isRunning;
}
/**
 * Whether the most recent execution ended in an error.
 */
hasError(): boolean {
return this.context.hasError;
}
/**
 * Message of the last recorded error, or undefined if none occurred.
 */
getErrorMessage(): string | undefined {
return this.context.errorMessage;
}
/**
 * Whether a failed execution may be retried via retry().
 */
canRetry(): boolean {
return this.context.canRetry;
}
/**
 * Sync the context with the current step list and options. Positional
 * fields (stepIndex, currentStep, retryCount) are only reset when no
 * execution is in progress, so a live run is not disturbed.
 */
private updateContext(): void {
this.context.totalSteps = this.steps.length;
this.context.maxRetries = this.options.maxRetries || 2;
if (!this.context.isRunning) {
this.context.stepIndex = 0;
this.context.currentStep = null;
this.context.retryCount = 0;
}
}
/**
 * Factory: create a manager pre-populated with the given steps.
 */
static createWithSteps(steps: ExecutionStep[], options: ExecutionOptions = {}): ExecutionStepManager {
return new ExecutionStepManager(steps, options);
}
/**
 * Factory: create a manager with an empty step list.
 */
static create(options: ExecutionOptions = {}): ExecutionStepManager {
return new ExecutionStepManager([], options);
}
}
/**
 * Default step list for report execution: a single data-fetch step.
 * name/description are user-facing Chinese strings — do not translate.
 */
export const DEFAULT_EXECUTION_STEPS: ExecutionStep[] = [
{
id: 'fetch_financial_data',
name: '正在读取财务数据',
description: '从Tushare API获取公司财务指标数据'
}
];
/**
 * Convenience factory: a manager initialized with DEFAULT_EXECUTION_STEPS.
 */
export function createDefaultStepManager(options: ExecutionOptions = {}): ExecutionStepManager {
return ExecutionStepManager.createWithSteps(DEFAULT_EXECUTION_STEPS, options);
}

View File

@ -0,0 +1,79 @@
/** One financial-metric observation for a symbol at a period end date. */
export interface TimeSeriesFinancialDto {
symbol: string;
metric_name: string;
period_date: string; // "YYYY-MM-DD" (year is extracted by substring(0, 4))
value: number;
source?: string; // data provider id, e.g. "tushare"
}
/** A pivoted table row: the metric name plus one column per year key. */
export interface FinancialTableRow {
metric: string;
[year: string]: string | number | undefined;
}
/** Pivoted table: headers are years (newest first), one row per metric. */
export interface FinancialTableData {
headers: string[]; // Sorted years
rows: FinancialTableRow[];
}
/**
 * Transforms a flat list of TimeSeriesFinancialDto into a pivoted table structure.
 *
 * Input:
 * [
 *   { metric_name: "Revenue", period_date: "2023-12-31", value: 100, source: "tushare" },
 *   { metric_name: "Revenue", period_date: "2022-12-31", value: 90, source: "tushare" },
 * ]
 *
 * Output:
 * {
 *   headers: ["2023", "2022"],
 *   rows: [
 *     { metric: "Revenue", "2023": 100, "2022": 90 }
 *   ]
 * }
 *
 * Notes:
 * - Records without a period_date are skipped.
 * - When several records share the same (metric, year) — e.g. from
 *   multiple sources — the last one wins; source info is not kept.
 * - Years with no value for a given metric are simply absent from that
 *   row; the index signature types them as `undefined`.
 */
export function transformFinancialData(data: TimeSeriesFinancialDto[]): FinancialTableData {
if (!data || data.length === 0) {
return { headers: [], rows: [] };
}
// Collect unique years and group values by metric name in one pass.
const yearsSet = new Set<string>();
const metricMap = new Map<string, FinancialTableRow>();
for (const item of data) {
if (!item.period_date) continue;
// Extract year from "YYYY-MM-DD"
const year = item.period_date.substring(0, 4);
yearsSet.add(year);
let row = metricMap.get(item.metric_name);
if (!row) {
row = { metric: item.metric_name };
metricMap.set(item.metric_name, row);
}
// Last write wins on duplicate (metric, year) pairs.
row[year] = item.value;
}
// Sort years descending (newest first)
const headers = Array.from(yearsSet).sort((a, b) => Number(b) - Number(a));
// The previous version mapped each row through an identity cast; that
// map was dead code and promised a missing-year fill it never did.
const rows = Array.from(metricMap.values());
return { headers, rows };
}

View File

@ -342,6 +342,23 @@ export interface ConfigSaveState {
timestamp?: number; timestamp?: number;
} }
/**
 * Task Status Enum (Matches Backend TaskStatus)
 *
 * NOTE(review): the workflow types file declares a different TaskStatus
 * ('Pending' | 'Scheduled' | ...). Confirm which enum each endpoint
 * actually serializes; the two must not be mixed.
 */
export type TaskStatus = 'queued' | 'in_progress' | 'completed' | 'failed';
/**
 * Task Progress DTO (Matches Backend TaskProgress)
 */
export interface TaskProgress {
request_id: string; // correlates with the originating request
task_name: string;
status: TaskStatus;
progress_percent: number; // 0-100
details: string;
started_at: string; // ISO8601
}
// ============================================================================ // ============================================================================
// API 相关类型 // API 相关类型
// ============================================================================ // ============================================================================

View File

@ -0,0 +1,32 @@
import {
AnalysisTemplateSet,
AnalysisResultDto,
TaskProgress
} from '@/types';
export interface ReportState {
// 1. Context
symbol: string;
market: string;
templateId: string;
templateConfig: AnalysisTemplateSet | null;
// 2. Phase Status
fetchStatus: 'idle' | 'fetching' | 'complete' | 'error';
analysisStatus: 'idle' | 'running' | 'complete' | 'error';
// 3. Data
fundamentalData: unknown[];
analysisResults: Record<string, AnalysisResultDto>; // Key: ModuleID
tasks: TaskProgress[];
// 4. Progress
requestId: string | null;
executionMeta: {
startTime: number;
elapsed: number;
tokens: number;
};
error: string | null;
}

View File

@ -0,0 +1,119 @@
/**
 * Workflow Types Definition
 * Corresponds to backend Rust types in `common-contracts/src/messages.rs`
 *
 * Keep in sync with the backend:
 * 1. Field names and enum variants must match the Rust serde output.
 * 2. Update this file whenever `messages.rs` changes.
 */
// ============================================================================
// Enums
// ============================================================================
export type TaskType = 'DataFetch' | 'DataProcessing' | 'Analysis';
export type TaskStatus =
| 'Pending' // waiting on dependencies
| 'Scheduled' // dependencies satisfied, dispatched to a worker
| 'Running' // worker is executing
| 'Completed' // finished successfully
| 'Failed' // execution failed
| 'Skipped'; // skipped due to upstream failure or policy
// ============================================================================
// Graph Structure (DAG)
// ============================================================================
/** A node in the workflow DAG; one schedulable task. */
export interface TaskNode {
id: string;
name: string;
type: TaskType;
initial_status: TaskStatus;
}
/**
 * Directed dependency edge between two task ids.
 * NOTE(review): assumed direction is from -> to (dependency before
 * dependent) — confirm against backend `messages.rs`.
 */
export interface TaskDependency {
from: string;
to: string;
}
/** The complete task graph carried by start/snapshot events. */
export interface WorkflowDag {
nodes: TaskNode[];
edges: TaskDependency[];
}
// ============================================================================
// Events (Server-Sent Events Payloads)
// ============================================================================
/**
 * Base interface for all workflow events
 * Discriminated union based on 'type' field
 *
 * Narrow on `type` before reading `payload`.
 */
export type WorkflowEvent =
// Emitted once when a run begins; carries the full DAG for rendering.
| {
type: 'WorkflowStarted';
payload: {
timestamp: number;
task_graph: WorkflowDag;
};
}
// A single task transitioned between TaskStatus states.
| {
type: 'TaskStateChanged';
payload: {
task_id: string;
task_type: TaskType;
status: TaskStatus;
message: string | null;
timestamp: number;
};
}
// Incremental streamed output for a task; `index` orders the deltas.
| {
type: 'TaskStreamUpdate';
payload: {
task_id: string;
content_delta: string;
index: number;
};
}
// Terminal success event for the whole workflow.
| {
type: 'WorkflowCompleted';
payload: {
result_summary: any; // JSON Value
end_timestamp: number;
};
}
// Terminal failure event; is_fatal distinguishes unrecoverable runs.
| {
type: 'WorkflowFailed';
payload: {
reason: string;
is_fatal: boolean;
end_timestamp: number;
};
}
// Full state dump, e.g. for clients (re)connecting mid-run.
| {
type: 'WorkflowStateSnapshot';
payload: {
timestamp: number;
task_graph: WorkflowDag;
tasks_status: Record<string, TaskStatus>;
tasks_output: Record<string, string | null>;
};
};
// ============================================================================
// API Request/Response Types
// ============================================================================
/** Request body for starting a workflow run. */
export interface StartWorkflowRequest {
symbol: string;
market?: string; // optional — TODO confirm what default the backend applies
template_id: string;
}
/** Response: identifiers of the created run. */
export interface StartWorkflowResponse {
request_id: string;
symbol: string;
market: string;
}

5
keys.md Normal file
View File

@ -0,0 +1,5 @@
alphaventage_key=PUOO7UPTNXN325NN
openrouter_url=https://openrouter.ai/api/v1
openrouter_key=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
tushare_key="f62b415de0a5a947fcb693b66cd299dd6242868bf04ad687800c7f3f"
finnhub_key="d3fjs5pr01qolkndil0gd3fjs5pr01qolkndil10"

63
scripts/check_services.sh Executable file
View File

@ -0,0 +1,63 @@
#!/bin/bash
# Run `cargo check` across every Rust service in dependency order and
# summarize which (if any) fail to compile. Exits 0 only if all pass.
# Intentionally no `set -e`: we keep checking remaining services after a
# failure and report them all at the end.
# Define the services to check (order matters for dependencies)
SERVICES=(
"services/common-contracts"
"services/data-persistence-service"
"services/workflow-orchestrator-service"
"services/api-gateway"
"services/report-generator-service"
"services/alphavantage-provider-service"
"services/tushare-provider-service"
"services/finnhub-provider-service"
"services/yfinance-provider-service"
)
echo "========================================================"
echo " RUST SERVICES COMPILATION CHECK SEQUENCE "
echo "========================================================"
echo ""
FAIL_COUNT=0
FAILED_SERVICES=()
for service_path in "${SERVICES[@]}"; do
echo "--------------------------------------------------------"
echo ">>> CHECKING: $service_path"
echo "--------------------------------------------------------"
if [ -d "$service_path" ]; then
pushd "$service_path" > /dev/null
# Run cargo check with SQLX_OFFLINE=true
# (SQLX_OFFLINE avoids needing a live database for sqlx macros)
if SQLX_OFFLINE=true cargo check --tests --all-features; then
echo "✅ SUCCESS: $service_path compiled successfully."
else
echo "❌ FAILURE: $service_path failed to compile."
FAIL_COUNT=$((FAIL_COUNT+1))
FAILED_SERVICES+=("$service_path")
fi
popd > /dev/null
else
echo "⚠️ WARNING: Directory $service_path not found!"
fi
echo ""
done
echo "========================================================"
echo " CHECK COMPLETE "
echo "========================================================"
if [ $FAIL_COUNT -eq 0 ]; then
echo "🎉 All services passed cargo check!"
exit 0
else
echo "💥 $FAIL_COUNT services failed to compile:"
for failed in "${FAILED_SERVICES[@]}"; do
echo " - $failed"
done
echo ""
echo "Please review errors above."
exit 1
fi

48
scripts/inspect_logs.sh Executable file
View File

@ -0,0 +1,48 @@
#!/bin/bash
# Print container status and the last N log lines for each service in
# the fundamental-analysis stack.
#
# Usage: inspect_logs.sh [N]   (N defaults to 10)
# Define the services we are interested in
SERVICES=(
"api-gateway"
"report-generator-service"
"data-persistence-service"
"alphavantage-provider-service"
"tushare-provider-service"
"finnhub-provider-service"
"yfinance-provider-service"
"workflow-orchestrator-service"
"fundamental_analysis-nats-1"
)
# Get line count from first argument, default to 10
LINES_INPUT=${1:-10}
# Reject non-numeric input so a malformed argument cannot be passed
# through to docker flags (previously unvalidated and unquoted).
case "$LINES_INPUT" in
''|*[!0-9]*)
echo "Error: line count must be a positive integer, got '$LINES_INPUT'" >&2
exit 1
;;
esac
echo "========================================================"
echo " FUNDAMENTAL ANALYSIS SYSTEM STATUS REPORT "
echo "========================================================"
echo "Showing last $LINES_INPUT lines of logs per service"
echo ""
for service in "${SERVICES[@]}"; do
echo "--------------------------------------------------------"
echo ">>> SERVICE: $service"
echo "--------------------------------------------------------"
# Anchor the grep so e.g. "api-gateway" does not match "api-gateway-2".
if docker ps -a --format '{{.Names}}' | grep -q "^${service}$"; then
STATUS=$(docker inspect --format='{{.State.Status}}' "$service")
echo "Status: $STATUS"
echo "Logs (Last $LINES_INPUT lines):"
echo ""
# Quote the tail count to avoid word splitting; logs go to stdout
# regardless of the container's stream via 2>&1.
docker logs "$service" --tail "$LINES_INPUT" 2>&1
else
echo "Status: NOT FOUND / NOT RUNNING"
fi
echo ""
echo ""
done
echo "========================================================"
echo " END OF LOG REPORT "
echo "========================================================"

141
scripts/run_component_tests.sh Executable file
View File

@ -0,0 +1,141 @@
#!/bin/bash
# Component-test runner: manages a dockerized test infrastructure
# (docker-compose.test.yml) and runs `cargo test` per service against it.
#
# Usage: run_component_tests.sh {prepare|destroy|test [package_name]}
set -e
# Configuration
COMPOSE_FILE="docker-compose.test.yml"
export NATS_ADDR="nats://localhost:4223"
export DATA_PERSISTENCE_SERVICE_URL="http://localhost:3001/api/v1"
# For services that might need direct DB access (e.g. persistence tests)
export DATABASE_URL="postgresql://postgres:postgres@localhost:5433/fundamental_test"
# Fake Service Host config for providers
export SERVICE_HOST="localhost"
export API_GATEWAY_URL="http://localhost:4000" # Mock
# Keys (Injected for testing)
# NOTE(review): these look like real API keys committed to version
# control — rotate them and load from an untracked env file instead.
export ALPHAVANTAGE_API_KEY="PUOO7UPTNXN325NN"
export TUSHARE_API_KEY="f62b415de0a5a947fcb693b66cd299dd6242868bf04ad687800c7f3f"
export FINNHUB_API_KEY="d3fjs5pr01qolkndil0gd3fjs5pr01qolkndil10"
export OPENROUTER_API_KEY="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"
export OPENROUTER_API_URL="https://openrouter.ai/api/v1"
# Common config for services
export SERVER_PORT=0 # Use random/no port for tests to avoid config errors
# Default URLs (From Frontend Defaults)
export ALPHAVANTAGE_MCP_URL="https://mcp.alphavantage.co/mcp"
export TUSHARE_API_URL="http://api.tushare.pro"
export FINNHUB_API_URL="https://finnhub.io/api/v1"
export YFINANCE_API_URL="https://query1.finance.yahoo.com" # Generic default
# Check for MCP URL (now set by default above, but good to keep check if user wants override)
if [ -z "$ALPHAVANTAGE_MCP_URL" ]; then
echo -e "\033[1;33m[WARNING]\033[0m ALPHAVANTAGE_MCP_URL is not set. Integration tests using it will fail."
echo "Please set it via: export ALPHAVANTAGE_MCP_URL='...'"
# Set a dummy for now to prevent crash, but test will fail connection
export ALPHAVANTAGE_MCP_URL="http://localhost:9999/sse"
fi
# Prefixed, colored log line.
function log() {
echo -e "\033[1;34m[TEST-RUNNER]\033[0m $1"
}
# Bring up the compose stack and wait until the persistence service
# answers its health check (or time out).
function start_env() {
log "Starting test infrastructure..."
docker-compose -f "$COMPOSE_FILE" up -d --build
log "Waiting for services to be healthy..."
# Simple wait loop for persistence service
local max_retries=30
local count=0
while ! curl -s http://localhost:3001/health > /dev/null; do
sleep 2
count=$((count+1))
if [ $count -ge $max_retries ]; then
log "Error: Timeout waiting for persistence service."
exit 1
fi
echo -n "."
done
echo ""
log "Infrastructure is ready!"
}
# Tear the stack down, removing volumes.
function stop_env() {
log "Stopping test infrastructure..."
docker-compose -f "$COMPOSE_FILE" down -v
log "Environment destroyed."
}
# Run `cargo test` inside one service directory; returns non-zero on failure.
function run_tests_in_dir() {
local dir=$1
log "Running tests in $dir..."
if ! (cd "$dir" && cargo test -- --nocapture); then
log "Tests failed in $dir"
return 1
fi
}
# Run tests for a single named package, or for every service when no
# package name is given. Aborts on first failure.
function run_tests() {
local package=$1
local services_dir="services"
if [ -n "$package" ]; then
if [ -d "$services_dir/$package" ]; then
if ! run_tests_in_dir "$services_dir/$package"; then
exit 1
fi
else
log "Error: Package directory '$services_dir/$package' not found."
exit 1
fi
else
log "Running ALL tests in services/ directory..."
for dir in "$services_dir"/*; do
if [ -d "$dir" ] && [ -f "$dir/Cargo.toml" ]; then
if ! run_tests_in_dir "$dir"; then
log "Aborting due to test failure in $dir."
exit 1
fi
fi
done
fi
}
# True (0) when the persistence service health endpoint responds.
function check_env_ready() {
if curl -s http://localhost:3001/health > /dev/null; then
return 0
else
return 1
fi
}
# CLI Argument Parsing
case "$1" in
"prepare"|"start")
if check_env_ready; then
log "Environment is already running."
else
start_env
fi
;;
"destroy"|"stop")
stop_env
;;
"test")
# Verify environment is ready
if ! check_env_ready; then
log "Error: Test environment is NOT ready."
log "Please run '$0 prepare' first to start the infrastructure."
exit 1
fi
# Quote the package argument so an empty/space-containing value is
# passed through intact (was unquoted: run_tests $2).
run_tests "$2"
;;
*)
echo "Usage: $0 {prepare|destroy|test [package_name]}"
echo "  prepare (start): Start test infrastructure (Docker)"
echo "  destroy (stop): Stop and cleanup test infrastructure"
echo "  test: Run cargo test (requires environment to be ready). Optional: provide package name."
exit 1
;;
esac

40
scripts/run_e2e.sh Executable file
View File

@ -0,0 +1,40 @@
#!/bin/bash
# End-to-end test driver: builds and starts the full compose stack
# (base + e2e overlay), waits for the API gateway, runs the Rust E2E
# runner in tests/end-to-end, and always tears the stack down on exit.
set -e
ROOT_DIR=$(pwd)
# Function to cleanup on exit
# Runs on every exit path (success or failure) via the EXIT trap below;
# dumps report-generator logs first to aid debugging failed runs.
cleanup() {
echo "[E2E] Dumping logs for report-generator-service..."
docker logs report-generator-service || true
echo "[E2E] Tearing down environment..."
cd "$ROOT_DIR"
docker-compose -f docker-compose.yml -f docker-compose.e2e.yml down
}
# Trap exit to ensure cleanup
trap cleanup EXIT
echo "[E2E] Building and Starting Environment..."
# Build specifically the services we need to ensure latest code
docker-compose -f docker-compose.yml -f docker-compose.e2e.yml up -d --build --remove-orphans
echo "[E2E] Waiting for API Gateway (localhost:4000)..."
MAX_RETRIES=30
count=0
# Poll the gateway health endpoint; give up after MAX_RETRIES * 2s.
until curl -s http://localhost:4000/health > /dev/null; do
count=$((count+1))
if [ $count -ge $MAX_RETRIES ]; then
echo "Timeout waiting for Gateway"
exit 1
fi
echo "Waiting for Gateway... ($count/$MAX_RETRIES)"
sleep 2
done
echo "Gateway is ready!"
echo "[E2E] Running Rust Test Runner..."
cd tests/end-to-end
# set -e aborts the script (and triggers cleanup) if the runner fails.
RUST_LOG=info cargo run
echo "[E2E] Tests Completed Successfully!"

View File

@ -352,12 +352,17 @@ dependencies = [
name = "common-contracts" name = "common-contracts"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"chrono", "chrono",
"log",
"reqwest",
"rust_decimal", "rust_decimal",
"serde", "serde",
"serde_json", "serde_json",
"service_kit", "service_kit",
"sqlx", "sqlx",
"tokio",
"tracing",
"utoipa", "utoipa",
"uuid", "uuid",
] ]
@ -2168,10 +2173,11 @@ dependencies = [
[[package]] [[package]]
name = "rmcp" name = "rmcp"
version = "0.8.5" version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5947688160b56fb6c827e3c20a72c90392a1d7e9dec74749197aa1780ac42ca" checksum = "acc36ea743d4bbc97e9f3c33bf0b97765a5cf338de3d9c3d2f321a6e38095615"
dependencies = [ dependencies = [
"async-trait",
"base64", "base64",
"chrono", "chrono",
"futures", "futures",
@ -2193,9 +2199,9 @@ dependencies = [
[[package]] [[package]]
name = "rmcp-macros" name = "rmcp-macros"
version = "0.8.5" version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "01263441d3f8635c628e33856c468b96ebbce1af2d3699ea712ca71432d4ee7a" checksum = "263caba1c96f2941efca0fdcd97b03f42bcde52d2347d05e5d77c93ab18c5b58"
dependencies = [ dependencies = [
"darling", "darling",
"proc-macro2", "proc-macro2",

View File

@ -13,7 +13,7 @@ tower-http = { version = "0.6.6", features = ["cors"] }
common-contracts = { path = "../common-contracts" } common-contracts = { path = "../common-contracts" }
# Generic MCP Client # Generic MCP Client
rmcp = { version = "0.8.5", features = ["client", "transport-streamable-http-client-reqwest"] } rmcp = { version = "0.9.0", features = ["client", "transport-streamable-http-client-reqwest"] }
# Message Queue (NATS) # Message Queue (NATS)
async-nats = "0.45.0" async-nats = "0.45.0"

View File

@ -7,6 +7,10 @@ pub struct AppConfig {
pub nats_addr: String, pub nats_addr: String,
pub data_persistence_service_url: String, pub data_persistence_service_url: String,
pub alphavantage_api_key: Option<SecretString>, pub alphavantage_api_key: Option<SecretString>,
// New fields
pub api_gateway_url: String,
pub service_host: String,
} }
impl AppConfig { impl AppConfig {
@ -22,6 +26,16 @@ impl AppConfig {
"DATA_PERSISTENCE_SERVICE_URL must not be empty".to_string(), "DATA_PERSISTENCE_SERVICE_URL must not be empty".to_string(),
)); ));
} }
if cfg.api_gateway_url.trim().is_empty() {
return Err(config::ConfigError::Message(
"API_GATEWAY_URL must not be empty".to_string(),
));
}
if cfg.service_host.trim().is_empty() {
return Err(config::ConfigError::Message(
"SERVICE_HOST must not be empty".to_string(),
));
}
Ok(cfg) Ok(cfg)
} }

View File

@ -4,7 +4,7 @@ mod config;
mod error; mod error;
mod mapping; mod mapping;
mod message_consumer; mod message_consumer;
mod persistence; // mod persistence; // Removed
mod state; mod state;
mod worker; mod worker;
mod av_client; mod av_client;
@ -14,7 +14,10 @@ mod transport;
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::error::Result; use crate::error::Result;
use crate::state::AppState; use crate::state::AppState;
use tracing::info; use tracing::{info, warn};
use common_contracts::lifecycle::ServiceRegistrar;
use common_contracts::registry::ServiceRegistration;
use std::sync::Arc;
#[tokio::main] #[tokio::main]
async fn main() -> Result<()> { async fn main() -> Result<()> {
@ -30,7 +33,7 @@ async fn main() -> Result<()> {
let port = config.server_port; let port = config.server_port;
// Initialize application state // Initialize application state
let app_state = AppState::new(config)?; let app_state = AppState::new(config.clone())?;
// --- Start the config poller --- // --- Start the config poller ---
tokio::spawn(config_poller::run_config_poller(app_state.clone())); tokio::spawn(config_poller::run_config_poller(app_state.clone()));
@ -41,12 +44,60 @@ async fn main() -> Result<()> {
// --- Start the message consumer --- // --- Start the message consumer ---
tokio::spawn(message_consumer::run(app_state)); tokio::spawn(message_consumer::run(app_state));
// --- Service Registration ---
let registrar = ServiceRegistrar::new(
config.api_gateway_url.clone(),
ServiceRegistration {
service_id: format!("{}-{}", "alphavantage-provider", uuid::Uuid::new_v4()),
service_name: "alphavantage".to_string(),
role: common_contracts::registry::ServiceRole::DataProvider,
base_url: format!("http://{}:{}", config.service_host, port),
health_check_url: format!("http://{}:{}/health", config.service_host, port),
}
);
let _ = registrar.register().await;
let registrar = Arc::new(registrar);
tokio::spawn(registrar.clone().start_heartbeat_loop());
// Start the HTTP server // Start the HTTP server
let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port)) let listener = tokio::net::TcpListener::bind(format!("0.0.0.0:{}", port))
.await .await
.unwrap(); .unwrap();
info!("HTTP server listening on port {}", port); info!("HTTP server listening on port {}", port);
axum::serve(listener, app).await.unwrap();
axum::serve(listener, app)
.with_graceful_shutdown(shutdown_signal(registrar))
.await
.unwrap();
Ok(()) Ok(())
} }
async fn shutdown_signal(registrar: Arc<ServiceRegistrar>) {
let ctrl_c = async {
tokio::signal::ctrl_c()
.await
.expect("failed to install Ctrl+C handler");
};
#[cfg(unix)]
let terminate = async {
tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())
.expect("failed to install signal handler")
.recv()
.await;
};
#[cfg(not(unix))]
let terminate = std::future::pending::<()>();
tokio::select! {
_ = ctrl_c => {},
_ = terminate => {},
}
info!("Shutdown signal received, deregistering service...");
let _ = registrar.deregister().await;
}

View File

@ -1,12 +1,11 @@
use crate::error::Result; use crate::error::Result;
use crate::state::{AppState, ServiceOperationalStatus}; use crate::state::{AppState, ServiceOperationalStatus};
use common_contracts::messages::FetchCompanyDataCommand; use common_contracts::messages::FetchCompanyDataCommand;
use common_contracts::subjects::NatsSubject;
use futures_util::StreamExt; use futures_util::StreamExt;
use std::time::Duration; use std::time::Duration;
use tracing::{error, info, warn}; use tracing::{error, info, warn};
const SUBJECT_NAME: &str = "data_fetch_commands";
pub async fn run(state: AppState) -> Result<()> { pub async fn run(state: AppState) -> Result<()> {
info!("Starting NATS message consumer..."); info!("Starting NATS message consumer...");
@ -38,10 +37,11 @@ pub async fn run(state: AppState) -> Result<()> {
} }
async fn subscribe_and_process(state: AppState, client: async_nats::Client) -> Result<()> { async fn subscribe_and_process(state: AppState, client: async_nats::Client) -> Result<()> {
let mut subscriber = client.subscribe(SUBJECT_NAME.to_string()).await?; let subject = NatsSubject::DataFetchCommands.to_string();
let mut subscriber = client.subscribe(subject.clone()).await?;
info!( info!(
"Consumer started, waiting for messages on subject '{}'", "Consumer started, waiting for messages on subject '{}'",
SUBJECT_NAME subject
); );
while let Some(message) = subscriber.next().await { while let Some(message) = subscriber.next().await {
@ -59,12 +59,17 @@ async fn subscribe_and_process(state: AppState, client: async_nats::Client) -> R
tokio::spawn(async move { tokio::spawn(async move {
match serde_json::from_slice::<FetchCompanyDataCommand>(&message.payload) { match serde_json::from_slice::<FetchCompanyDataCommand>(&message.payload) {
Ok(command) => { Ok(command) => {
let request_id = command.request_id;
info!("Deserialized command for symbol: {}", command.symbol); info!("Deserialized command for symbol: {}", command.symbol);
if let Err(e) = if let Err(e) =
crate::worker::handle_fetch_command(state_clone, command, publisher_clone) crate::worker::handle_fetch_command(state_clone.clone(), command, publisher_clone)
.await .await
{ {
error!("Error handling fetch command: {:?}", e); error!("Error handling fetch command: {:?}", e);
if let Some(mut task) = state_clone.tasks.get_mut(&request_id) {
task.status = common_contracts::observability::TaskStatus::Failed;
task.details = format!("Worker failed: {}", e);
}
} }
} }
Err(e) => { Err(e) => {

View File

@ -1,80 +0,0 @@
//!
//! 数据持久化客户端
//!
//! 提供一个类型化的接口,用于与 `data-persistence-service` 进行通信。
//!
use crate::error::Result;
use common_contracts::{
dtos::{CompanyProfileDto, RealtimeQuoteDto, TimeSeriesFinancialBatchDto, TimeSeriesFinancialDto},
};
use tracing::info;
#[derive(Clone)]
pub struct PersistenceClient {
client: reqwest::Client,
base_url: String,
}
impl PersistenceClient {
pub fn new(base_url: String) -> Self {
Self {
client: reqwest::Client::new(),
base_url,
}
}
pub async fn get_company_profile(&self, symbol: &str) -> Result<Option<CompanyProfileDto>> {
let url = format!("{}/companies/{}", self.base_url, symbol);
let resp = self.client.get(&url).send().await?;
if resp.status() == reqwest::StatusCode::NOT_FOUND {
return Ok(None);
}
let profile = resp.error_for_status()?.json().await?;
Ok(Some(profile))
}
pub async fn upsert_company_profile(&self, profile: CompanyProfileDto) -> Result<()> {
let url = format!("{}/companies", self.base_url);
info!("Upserting company profile for {} to {}", profile.symbol, url);
self.client
.put(&url)
.json(&profile)
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn upsert_realtime_quote(&self, quote: RealtimeQuoteDto) -> Result<()> {
let url = format!("{}/market-data/quotes", self.base_url);
info!("Upserting realtime quote for {} to {}", quote.symbol, url);
self.client
.post(&url)
.json(&quote)
.send()
.await?
.error_for_status()?;
Ok(())
}
pub async fn batch_insert_financials(&self, dtos: Vec<TimeSeriesFinancialDto>) -> Result<()> {
if dtos.is_empty() {
return Ok(());
}
let url = format!("{}/market-data/financials/batch", self.base_url);
let symbol = dtos[0].symbol.clone();
info!("Batch inserting {} financial statements for {} to {}", dtos.len(), symbol, url);
let batch = TimeSeriesFinancialBatchDto { records: dtos };
self.client
.post(&url)
.json(&batch)
.send()
.await?
.error_for_status()?;
Ok(())
}
}

View File

@ -46,7 +46,12 @@ impl AppState {
let mut provider_guard = self.av_provider.write().await; let mut provider_guard = self.av_provider.write().await;
let mut status_guard = self.status.write().await; let mut status_guard = self.status.write().await;
match (api_key, api_url) { // Fallback to default URL if not provided
let final_url = api_url
.filter(|s| !s.trim().is_empty())
.or_else(|| Some("https://mcp.alphavantage.co/mcp".to_string()));
match (api_key, final_url) {
(Some(key), Some(base_url)) => { (Some(key), Some(base_url)) => {
if base_url.contains('?') { if base_url.contains('?') {
*provider_guard = None; *provider_guard = None;

View File

@ -27,7 +27,10 @@ pub struct CustomHttpClient {
impl Default for CustomHttpClient { impl Default for CustomHttpClient {
fn default() -> Self { fn default() -> Self {
Self { Self {
client: reqwest::Client::new(), client: reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.unwrap_or_else(|_| reqwest::Client::new()),
} }
} }
} }

View File

@ -1,11 +1,12 @@
use crate::error::{Result, AppError}; use crate::error::{Result, AppError};
use crate::mapping::{CombinedFinancials, parse_company_profile, parse_financials, parse_realtime_quote}; use crate::mapping::{CombinedFinancials, parse_company_profile, parse_financials, parse_realtime_quote};
use crate::persistence::PersistenceClient; use common_contracts::persistence_client::PersistenceClient;
use common_contracts::dtos::{SessionDataDto, ProviderCacheDto, TimeSeriesFinancialDto, CompanyProfileDto};
use crate::state::{AppState, TaskStore}; use crate::state::{AppState, TaskStore};
use anyhow::Context; use anyhow::Context;
use chrono::{Utc, Datelike}; use chrono::{Utc, Datelike, Duration};
use common_contracts::messages::{FetchCompanyDataCommand, FinancialsPersistedEvent}; use common_contracts::messages::{FetchCompanyDataCommand, FinancialsPersistedEvent, DataFetchFailedEvent};
use common_contracts::observability::TaskProgress; use common_contracts::observability::{TaskProgress, TaskStatus};
use tracing::{error, info, instrument, warn}; use tracing::{error, info, instrument, warn};
use uuid::Uuid; use uuid::Uuid;
use serde_json::Value; use serde_json::Value;
@ -15,13 +16,59 @@ pub async fn handle_fetch_command(
state: AppState, state: AppState,
command: FetchCompanyDataCommand, command: FetchCompanyDataCommand,
publisher: async_nats::Client, publisher: async_nats::Client,
) -> Result<()> {
match handle_fetch_command_inner(state.clone(), &command, &publisher).await {
Ok(_) => Ok(()),
Err(e) => {
error!("AlphaVantage workflow failed: {}", e);
// Publish failure event
let event = DataFetchFailedEvent {
request_id: command.request_id,
symbol: command.symbol.clone(),
error: e.to_string(),
provider_id: Some("alphavantage".to_string()),
};
let _ = publisher
.publish(
"events.data.fetch_failed".to_string(),
serde_json::to_vec(&event).unwrap().into(),
)
.await;
// Update task status
if let Some(mut task) = state.tasks.get_mut(&command.request_id) {
task.status = TaskStatus::Failed;
task.details = format!("Failed: {}", e);
} else {
// If task doesn't exist (e.g. failed at insert), create a failed task
let task = TaskProgress {
request_id: command.request_id,
task_name: format!("alphavantage:{}", command.symbol),
status: TaskStatus::Failed,
progress_percent: 0,
details: format!("Failed: {}", e),
started_at: Utc::now(),
};
state.tasks.insert(command.request_id, task);
}
Err(e)
}
}
}
async fn handle_fetch_command_inner(
state: AppState,
command: &FetchCompanyDataCommand,
publisher: &async_nats::Client,
) -> Result<()> { ) -> Result<()> {
info!("Handling fetch data command."); info!("Handling fetch data command.");
let task = TaskProgress { let task = TaskProgress {
request_id: command.request_id, request_id: command.request_id,
task_name: format!("fetch_data_for_{}", command.symbol), task_name: format!("alphavantage:{}", command.symbol),
status: "in_progress".to_string(), status: TaskStatus::InProgress,
progress_percent: 0, progress_percent: 0,
details: "Initializing...".to_string(), details: "Initializing...".to_string(),
started_at: Utc::now(), started_at: Utc::now(),
@ -32,13 +79,6 @@ pub async fn handle_fetch_command(
Some(p) => p, Some(p) => p,
None => { None => {
let reason = "Execution failed: Alphavantage provider is not available (misconfigured).".to_string(); let reason = "Execution failed: Alphavantage provider is not available (misconfigured).".to_string();
error!("{}", reason);
update_task_progress(
&state.tasks,
command.request_id,
100,
&reason,
).await;
return Err(AppError::ProviderNotAvailable(reason)); return Err(AppError::ProviderNotAvailable(reason));
} }
}; };
@ -47,62 +87,48 @@ pub async fn handle_fetch_command(
PersistenceClient::new(state.config.data_persistence_service_url.clone()); PersistenceClient::new(state.config.data_persistence_service_url.clone());
let symbol = command.symbol.clone(); let symbol = command.symbol.clone();
// Check freshness // Symbol conversion using shared logic
let mut is_fresh = false; let av_symbol = symbol.to_alphavantage();
match persistence_client.get_company_profile(&command.symbol).await {
Ok(Some(p)) => {
if let Some(updated_at) = p.updated_at {
let age = chrono::Utc::now() - updated_at;
if age < chrono::Duration::hours(24) {
info!("Data for {} is fresh (age: {}h). Skipping fetch.", command.symbol, age.num_hours());
is_fresh = true;
}
}
}
Ok(None) => {}
Err(e) => tracing::warn!("Failed to check profile freshness: {}", e),
}
if is_fresh {
let event = FinancialsPersistedEvent {
request_id: command.request_id,
symbol: command.symbol,
years_updated: vec![],
template_id: command.template_id,
};
let subject = "events.data.financials_persisted".to_string();
publisher
.publish(subject, serde_json::to_vec(&event).unwrap().into())
.await?;
update_task_progress(
&state.tasks,
command.request_id,
100,
"Data retrieved from cache",
)
.await;
return Ok(());
}
// Symbol conversion for Chinese stocks
let av_symbol = if symbol.ends_with(".SH") {
symbol.replace(".SH", ".SS")
} else {
symbol.clone()
};
info!("Using symbol for AlphaVantage: {}", av_symbol); info!("Using symbol for AlphaVantage: {}", av_symbol);
update_task_progress( update_task_progress(
&state.tasks, &state.tasks,
command.request_id, command.request_id,
10, 10,
"Fetching from AlphaVantage...", "Checking cache...",
None,
) )
.await; .await;
// --- 1. Fetch all data in parallel --- // --- 1. Check Cache ---
let (overview_json, income_json, balance_json, cashflow_json, quote_json) = { let cache_key = format!("alphavantage:{}:all", av_symbol);
let (overview_json, income_json, balance_json, cashflow_json, quote_json) = match persistence_client.get_cache(&cache_key).await.map_err(|e| AppError::Internal(e.to_string()))? {
Some(cache_entry) => {
info!("Cache HIT for {}", cache_key);
// Deserialize tuple of JSONs
let data: (Value, Value, Value, Value, Value) = serde_json::from_value(cache_entry.data_payload)
.map_err(|e| AppError::Internal(format!("Failed to deserialize cache: {}", e)))?;
update_task_progress(
&state.tasks,
command.request_id,
50,
"Data retrieved from cache",
None,
).await;
data
},
None => {
info!("Cache MISS for {}", cache_key);
update_task_progress(
&state.tasks,
command.request_id,
20,
"Fetching from AlphaVantage API...",
None,
).await;
let params_overview = vec![("symbol", av_symbol.as_str())]; let params_overview = vec![("symbol", av_symbol.as_str())];
let params_income = vec![("symbol", av_symbol.as_str())]; let params_income = vec![("symbol", av_symbol.as_str())];
let params_balance = vec![("symbol", av_symbol.as_str())]; let params_balance = vec![("symbol", av_symbol.as_str())];
@ -116,7 +142,7 @@ pub async fn handle_fetch_command(
let cashflow_task = client.query("CASH_FLOW", &params_cashflow); let cashflow_task = client.query("CASH_FLOW", &params_cashflow);
let quote_task = client.query("GLOBAL_QUOTE", &params_quote); let quote_task = client.query("GLOBAL_QUOTE", &params_quote);
match tokio::try_join!( let data = match tokio::try_join!(
overview_task, overview_task,
income_task, income_task,
balance_task, balance_task,
@ -127,41 +153,60 @@ pub async fn handle_fetch_command(
Err(e) => { Err(e) => {
let error_msg = format!("Failed to fetch data from AlphaVantage: {}", e); let error_msg = format!("Failed to fetch data from AlphaVantage: {}", e);
error!(error_msg); error!(error_msg);
update_task_progress(&state.tasks, command.request_id, 100, &error_msg).await;
return Err(e); return Err(e);
} }
};
// Write to Cache
let payload = serde_json::json!(data);
persistence_client.set_cache(&ProviderCacheDto {
cache_key,
data_payload: payload,
expires_at: Utc::now() + Duration::hours(24),
updated_at: None,
}).await.map_err(|e| AppError::Internal(e.to_string()))?;
data
} }
}; };
update_task_progress( update_task_progress(
&state.tasks, &state.tasks,
command.request_id, command.request_id,
50, 70,
"Data fetched, transforming and persisting...", "Data fetched, processing...",
None,
) )
.await; .await;
// --- 2. Transform and persist data --- // --- 2. Transform and Snapshot Data ---
// Profile
// Check if overview_json is empty (Symbol field check) // 2.1 Profile
if let Some(_symbol_val) = overview_json.get("Symbol") { if let Some(_symbol_val) = overview_json.get("Symbol") {
match parse_company_profile(overview_json) { match parse_company_profile(overview_json) {
Ok(profile_to_persist) => { Ok(profile_to_persist) => {
persistence_client // Update Global Profile
.upsert_company_profile(profile_to_persist) // REMOVED: upsert_company_profile is deprecated.
.await?; // let _ = persistence_client.upsert_company_profile(profile_to_persist.clone()).await;
// Snapshot Profile
persistence_client.insert_session_data(&SessionDataDto {
request_id: command.request_id,
symbol: command.symbol.to_string(),
provider: "alphavantage".to_string(),
data_type: "company_profile".to_string(),
data_payload: serde_json::to_value(&profile_to_persist).unwrap(),
created_at: None,
}).await.map_err(|e| AppError::Internal(e.to_string()))?;
}, },
Err(e) => { Err(e) => {
warn!("Failed to parse CompanyProfile: {}", e); warn!("Failed to parse CompanyProfile: {}", e);
} }
} }
} else {
warn!("CompanyProfile data is empty or missing 'Symbol' for {}, skipping persistence.", av_symbol);
} }
// Financials // 2.2 Financials
let mut years_updated: Vec<u16> = Vec::new(); let mut years_updated: Vec<u16> = Vec::new();
// Only attempt to parse financials if we have data (simple check if income statement has annualReports)
if income_json.get("annualReports").is_some() { if income_json.get("annualReports").is_some() {
let combined_financials = CombinedFinancials { let combined_financials = CombinedFinancials {
income: income_json, income: income_json,
@ -175,25 +220,28 @@ pub async fn handle_fetch_command(
.iter() .iter()
.map(|f| f.period_date.year() as u16) .map(|f| f.period_date.year() as u16)
.collect(); .collect();
persistence_client
.batch_insert_financials(financials_to_persist) // Snapshot Financials
.await?; persistence_client.insert_session_data(&SessionDataDto {
request_id: command.request_id,
symbol: command.symbol.to_string(),
provider: "alphavantage".to_string(),
data_type: "financial_statements".to_string(),
data_payload: serde_json::to_value(&financials_to_persist).unwrap(),
created_at: None,
}).await.map_err(|e| AppError::Internal(e.to_string()))?;
} }
}, },
Err(e) => { Err(e) => {
warn!("Failed to parse Financials: {}", e); warn!("Failed to parse Financials: {}", e);
} }
} }
} else {
warn!("Financial data missing for {}, skipping.", av_symbol);
} }
// Quote // 2.3 Quote
// Fix Python-dict string if necessary // Fix Python-dict string if necessary
let fixed_quote_json = if let Some(s) = quote_json.as_str() { let fixed_quote_json = if let Some(s) = quote_json.as_str() {
if s.trim().starts_with("{'Global Quote'") { if s.trim().starts_with("{'Global Quote'") {
// Attempt to replace single quotes with double quotes
// Note: This is a naive fix but works for the expected format
let fixed = s.replace("'", "\""); let fixed = s.replace("'", "\"");
match serde_json::from_str::<Value>(&fixed) { match serde_json::from_str::<Value>(&fixed) {
Ok(v) => v, Ok(v) => v,
@ -209,13 +257,24 @@ pub async fn handle_fetch_command(
quote_json quote_json
}; };
// Realtime quote is global/time-series, so we still use upsert_realtime_quote
let mut summary = format!("Fetched {} years of financial data", years_updated.len());
match parse_realtime_quote(fixed_quote_json, &command.market) { match parse_realtime_quote(fixed_quote_json, &command.market) {
Ok(mut quote_to_persist) => { Ok(mut quote_to_persist) => {
// Restore original symbol if we converted it quote_to_persist.symbol = command.symbol.to_string();
quote_to_persist.symbol = command.symbol.clone(); // Snapshot Realtime Quote
persistence_client let _ = persistence_client.insert_session_data(&SessionDataDto {
.upsert_realtime_quote(quote_to_persist) request_id: command.request_id,
.await?; symbol: command.symbol.to_string(),
provider: "alphavantage".to_string(),
data_type: "realtime_quote".to_string(),
data_payload: serde_json::to_value(&quote_to_persist).unwrap(),
created_at: None,
}).await;
summary = format!("Parsed Realtime Quote for {}: Price={}, Volume={:?}",
quote_to_persist.symbol, quote_to_persist.price, quote_to_persist.volume);
}, },
Err(e) => { Err(e) => {
warn!("Failed to parse RealtimeQuote: {}", e); warn!("Failed to parse RealtimeQuote: {}", e);
@ -226,36 +285,130 @@ pub async fn handle_fetch_command(
&state.tasks, &state.tasks,
command.request_id, command.request_id,
90, 90,
"Data persisted, publishing events...", "Snapshot created, publishing events...",
None,
) )
.await; .await;
// --- 3. Publish events --- // --- 3. Publish events ---
// Only publish if we actually updated something
// Actually, we should publish event even if partial, to signal completion?
// The command is "FetchCompanyData", implies success if we fetched *available* data.
let event = FinancialsPersistedEvent { let event = FinancialsPersistedEvent {
request_id: command.request_id, request_id: command.request_id,
symbol: command.symbol, symbol: command.symbol.clone(),
years_updated, years_updated,
template_id: command.template_id, template_id: command.template_id.clone(),
provider_id: Some("alphavantage".to_string()),
data_summary: Some(summary),
}; };
let subject = "events.data.financials_persisted".to_string(); // NATS subject let subject = "events.data.financials_persisted".to_string();
publisher publisher
.publish(subject, serde_json::to_vec(&event).unwrap().into()) .publish(subject, serde_json::to_vec(&event).unwrap().into())
.await?; .await?;
state.tasks.remove(&command.request_id); // Update Provider Status
info!("Task completed successfully (Partial data may be missing if provider lacks coverage)."); // REMOVED: update_provider_status is deprecated or missing in client.
/*
persistence_client.update_provider_status(command.symbol.as_str(), "alphavantage", common_contracts::dtos::ProviderStatusDto {
last_updated: chrono::Utc::now(),
status: TaskStatus::Completed,
data_version: None,
}).await?;
*/
update_task_progress(
&state.tasks,
command.request_id,
100,
"Task completed successfully",
Some(TaskStatus::Completed),
).await;
info!("AlphaVantage task completed successfully.");
Ok(()) Ok(())
} }
async fn update_task_progress(tasks: &TaskStore, request_id: Uuid, percent: u8, details: &str) { async fn update_task_progress(tasks: &TaskStore, request_id: Uuid, percent: u8, details: &str, status: Option<TaskStatus>) {
if let Some(mut task) = tasks.get_mut(&request_id) { if let Some(mut task) = tasks.get_mut(&request_id) {
task.progress_percent = percent; task.progress_percent = percent;
task.details = details.to_string(); task.details = details.to_string();
info!("Task update: {}% - {}", percent, details); if let Some(s) = status {
task.status = s;
}
info!("Task update: {}% - {} (Status: {:?})", percent, details, task.status);
}
}
#[cfg(test)]
mod integration_tests {
use super::*;
use crate::config::AppConfig;
use crate::state::AppState;
use secrecy::SecretString;
use std::time::Duration;
use common_contracts::symbol_utils::{CanonicalSymbol, Market};
#[tokio::test]
async fn test_alphavantage_fetch_flow() {
// Check if running in test environment
if std::env::var("NATS_ADDR").is_err() {
// Skip if env vars not set (e.g. running cargo test without script)
// But better to panic to alert developer
// panic!("Must run integration tests with run_component_tests.sh or set env vars");
println!("Skipping integration test (no environment)");
return;
}
// 1. Environment Variables
// Assumed set by external script, but we double check specific overrides for component test
// NATS_ADDR, DATA_PERSISTENCE_SERVICE_URL, ALPHAVANTAGE_API_KEY, ALPHAVANTAGE_MCP_URL
let api_key = std::env::var("ALPHAVANTAGE_API_KEY")
.unwrap_or_else(|_| "PUOO7UPTNXN325NN".to_string());
let mcp_url = std::env::var("ALPHAVANTAGE_MCP_URL")
.expect("ALPHAVANTAGE_MCP_URL must be set");
let config = AppConfig::load().expect("Failed to load config");
let state = AppState::new(config.clone()).expect("Failed to create state");
// 2. Manual Init Provider (Skip Config Poller)
state.update_provider(
Some(SecretString::new(api_key.into())),
Some(mcp_url)
).await;
// Wait for connection
let mut connected = false;
for _ in 0..10 {
if state.get_provider().await.is_some() {
connected = true;
break;
}
tokio::time::sleep(Duration::from_millis(500)).await;
}
assert!(connected, "Failed to connect to AlphaVantage MCP Provider");
// 3. Construct Command
let request_id = Uuid::new_v4();
let cmd = FetchCompanyDataCommand {
request_id,
symbol: CanonicalSymbol::new("IBM", &Market::US),
market: "US".to_string(),
template_id: Some("default".to_string()),
};
// 4. NATS
let nats_client = async_nats::connect(&config.nats_addr).await
.expect("Failed to connect to NATS");
// 5. Run
let result = handle_fetch_command_inner(state.clone(), &cmd, &nats_client).await;
// 6. Assert
assert!(result.is_ok(), "Worker execution failed: {:?}", result.err());
let task = state.tasks.get(&request_id).expect("Task should exist");
assert_eq!(task.status, TaskStatus::Completed);
} }
} }

View File

@ -49,7 +49,9 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-nats", "async-nats",
"async-stream",
"axum", "axum",
"chrono",
"common-contracts", "common-contracts",
"config", "config",
"futures-util", "futures-util",
@ -113,6 +115,28 @@ dependencies = [
"url", "url",
] ]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.89" version = "0.1.89"
@ -345,12 +369,17 @@ dependencies = [
name = "common-contracts" name = "common-contracts"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"chrono", "chrono",
"log",
"reqwest",
"rust_decimal", "rust_decimal",
"serde", "serde",
"serde_json", "serde_json",
"service_kit", "service_kit",
"sqlx", "sqlx",
"tokio",
"tracing",
"utoipa", "utoipa",
"uuid", "uuid",
] ]
@ -644,6 +673,16 @@ dependencies = [
"typeid", "typeid",
] ]
[[package]]
name = "errno"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "etcetera" name = "etcetera"
version = "0.8.0" version = "0.8.0"
@ -666,6 +705,12 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]] [[package]]
name = "fiat-crypto" name = "fiat-crypto"
version = "0.2.9" version = "0.2.9"
@ -701,6 +746,21 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]] [[package]]
name = "form_urlencoded" name = "form_urlencoded"
version = "1.2.2" version = "1.2.2"
@ -837,6 +897,25 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "h2"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386"
dependencies = [
"atomic-waker",
"bytes",
"fnv",
"futures-core",
"futures-sink",
"http",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.12.3" version = "0.12.3"
@ -973,6 +1052,7 @@ dependencies = [
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-core", "futures-core",
"h2",
"http", "http",
"http-body", "http-body",
"httparse", "httparse",
@ -1002,6 +1082,22 @@ dependencies = [
"webpki-roots 1.0.4", "webpki-roots 1.0.4",
] ]
[[package]]
name = "hyper-tls"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
dependencies = [
"bytes",
"http-body-util",
"hyper",
"hyper-util",
"native-tls",
"tokio",
"tokio-native-tls",
"tower-service",
]
[[package]] [[package]]
name = "hyper-util" name = "hyper-util"
version = "0.1.18" version = "0.1.18"
@ -1021,9 +1117,11 @@ dependencies = [
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"socket2", "socket2",
"system-configuration",
"tokio", "tokio",
"tower-service", "tower-service",
"tracing", "tracing",
"windows-registry",
] ]
[[package]] [[package]]
@ -1258,6 +1356,12 @@ dependencies = [
"vcpkg", "vcpkg",
] ]
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
[[package]] [[package]]
name = "litemap" name = "litemap"
version = "0.8.1" version = "0.8.1"
@ -1333,6 +1437,23 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "native-tls"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
dependencies = [
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]] [[package]]
name = "nkeys" name = "nkeys"
version = "0.4.5" version = "0.4.5"
@ -1424,12 +1545,50 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "openssl"
version = "0.10.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328"
dependencies = [
"bitflags",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
"openssl-macros",
"openssl-sys",
]
[[package]]
name = "openssl-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]] [[package]]
name = "openssl-probe" name = "openssl-probe"
version = "0.1.6" version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-sys"
version = "0.9.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]] [[package]]
name = "ordered-multimap" name = "ordered-multimap"
version = "0.7.3" version = "0.7.3"
@ -1712,7 +1871,7 @@ dependencies = [
"once_cell", "once_cell",
"socket2", "socket2",
"tracing", "tracing",
"windows-sys 0.52.0", "windows-sys 0.60.2",
] ]
[[package]] [[package]]
@ -1870,15 +2029,21 @@ checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
dependencies = [ dependencies = [
"base64", "base64",
"bytes", "bytes",
"encoding_rs",
"futures-core", "futures-core",
"futures-util",
"h2",
"http", "http",
"http-body", "http-body",
"http-body-util", "http-body-util",
"hyper", "hyper",
"hyper-rustls", "hyper-rustls",
"hyper-tls",
"hyper-util", "hyper-util",
"js-sys", "js-sys",
"log", "log",
"mime",
"native-tls",
"percent-encoding", "percent-encoding",
"pin-project-lite", "pin-project-lite",
"quinn", "quinn",
@ -1889,13 +2054,16 @@ dependencies = [
"serde_urlencoded", "serde_urlencoded",
"sync_wrapper", "sync_wrapper",
"tokio", "tokio",
"tokio-native-tls",
"tokio-rustls", "tokio-rustls",
"tokio-util",
"tower", "tower",
"tower-http", "tower-http",
"tower-service", "tower-service",
"url", "url",
"wasm-bindgen", "wasm-bindgen",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"wasm-streams",
"web-sys", "web-sys",
"webpki-roots 1.0.4", "webpki-roots 1.0.4",
] ]
@ -2018,6 +2186,19 @@ dependencies = [
"semver", "semver",
] ]
[[package]]
name = "rustix"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.35" version = "0.23.35"
@ -2711,12 +2892,46 @@ dependencies = [
"syn 2.0.110", "syn 2.0.110",
] ]
[[package]]
name = "system-configuration"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
dependencies = [
"bitflags",
"core-foundation",
"system-configuration-sys",
]
[[package]]
name = "system-configuration-sys"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "tap" name = "tap"
version = "1.0.1" version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
dependencies = [
"fastrand",
"getrandom 0.3.4",
"once_cell",
"rustix",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "1.0.69" version = "1.0.69"
@ -2859,6 +3074,16 @@ dependencies = [
"syn 2.0.110", "syn 2.0.110",
] ]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
"tokio",
]
[[package]] [[package]]
name = "tokio-rustls" name = "tokio-rustls"
version = "0.26.4" version = "0.26.4"
@ -3308,6 +3533,19 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "wasm-streams"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65"
dependencies = [
"futures-util",
"js-sys",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]] [[package]]
name = "web-sys" name = "web-sys"
version = "0.3.82" version = "0.3.82"
@ -3397,6 +3635,17 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720"
dependencies = [
"windows-link",
"windows-result",
"windows-strings",
]
[[package]] [[package]]
name = "windows-result" name = "windows-result"
version = "0.4.1" version = "0.4.1"

View File

@ -15,9 +15,10 @@ common-contracts = { path = "../common-contracts" }
# Message Queue (NATS) # Message Queue (NATS)
async-nats = "0.45.0" async-nats = "0.45.0"
futures-util = "0.3" futures-util = "0.3"
async-stream = "0.3"
# HTTP Client # HTTP Client
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls", "stream"] }
# Concurrency & Async # Concurrency & Async
uuid = { version = "1.8", features = ["v4"] } uuid = { version = "1.8", features = ["v4"] }
@ -36,3 +37,4 @@ config = "0.15.19"
# Error Handling # Error Handling
thiserror = "2.0.17" thiserror = "2.0.17"
anyhow = "1.0" anyhow = "1.0"
chrono = "0.4.42"

View File

@ -7,28 +7,31 @@ use axum::{
routing::{get, post}, routing::{get, post},
Router, Router,
}; };
use common_contracts::messages::{FetchCompanyDataCommand, GenerateReportCommand}; use common_contracts::messages::GenerateReportCommand;
use common_contracts::observability::{HealthStatus, ServiceStatus, TaskProgress}; use common_contracts::observability::{TaskProgress, TaskStatus};
use common_contracts::subjects::{NatsSubject, SubjectMessage};
use common_contracts::symbol_utils::{CanonicalSymbol, Market};
use futures_util::future::join_all; use futures_util::future::join_all;
use futures_util::stream::StreamExt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use tracing::{info, warn, error};
use tracing::{info, warn};
use uuid::Uuid; use uuid::Uuid;
const DATA_FETCH_QUEUE: &str = "data_fetch_commands"; mod registry;
const ANALYSIS_COMMANDS_QUEUE: &str = "analysis.commands.generate_report";
// --- Request/Response Structs --- // --- Request/Response Structs ---
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct DataRequest { pub struct DataRequest {
pub symbol: String, pub symbol: String,
pub market: String, pub market: Option<String>,
pub template_id: Option<String>, pub template_id: String, // Changed to required as it's mandatory for workflow
} }
#[derive(Serialize)] #[derive(Serialize)]
pub struct RequestAcceptedResponse { pub struct RequestAcceptedResponse {
pub request_id: Uuid, pub request_id: Uuid,
pub symbol: String,
pub market: String,
} }
#[derive(Deserialize)] #[derive(Deserialize)]
@ -41,18 +44,40 @@ pub struct AnalysisResultQuery {
pub symbol: String, pub symbol: String,
} }
#[derive(Deserialize)]
pub struct SymbolResolveRequest {
pub symbol: String,
pub market: Option<String>,
}
#[derive(Serialize)]
pub struct SymbolResolveResponse {
pub symbol: String,
pub market: String,
}
// --- Router Definition --- // --- Router Definition ---
pub fn create_router(app_state: AppState) -> Router { pub fn create_router(app_state: AppState) -> Router {
Router::new() Router::new()
.route("/health", get(health_check)) .route("/health", get(health_check))
.route("/tasks", get(get_current_tasks)) // This is the old, stateless one .route("/tasks/{request_id}", get(get_task_progress))
.nest("/v1", create_v1_router()) .nest("/v1", create_v1_router())
.with_state(app_state) .with_state(app_state)
} }
use common_contracts::messages::{StartWorkflowCommand, SyncStateCommand, WorkflowEvent};
fn create_v1_router() -> Router<AppState> { fn create_v1_router() -> Router<AppState> {
Router::new() Router::new()
.route("/data-requests", post(trigger_data_fetch)) // New Workflow API
.route("/workflow/start", post(start_workflow))
.route("/workflow/events/{request_id}", get(workflow_events_stream))
// Tools
.route("/tools/resolve-symbol", post(resolve_symbol))
// Legacy routes (marked for removal or compatibility)
.route("/data-requests", post(trigger_data_fetch_legacy))
.route("/session-data/{request_id}", get(proxy_get_session_data))
.route("/analysis-results/stream", get(proxy_analysis_stream))
.route( .route(
"/analysis-requests/{symbol}", "/analysis-requests/{symbol}",
post(trigger_analysis_generation), post(trigger_analysis_generation),
@ -60,8 +85,7 @@ fn create_v1_router() -> Router<AppState> {
.route("/analysis-results", get(get_analysis_results_by_symbol)) .route("/analysis-results", get(get_analysis_results_by_symbol))
.route("/companies/{symbol}/profile", get(get_company_profile)) .route("/companies/{symbol}/profile", get(get_company_profile))
.route("/market-data/financial-statements/{symbol}", get(get_financials_by_symbol)) .route("/market-data/financial-statements/{symbol}", get(get_financials_by_symbol))
.route("/tasks/{request_id}", get(get_task_progress)) // ... Config routes remain same ...
// --- New Config Routes ---
.route( .route(
"/configs/llm_providers", "/configs/llm_providers",
get(get_llm_providers_config).put(update_llm_providers_config), get(get_llm_providers_config).put(update_llm_providers_config),
@ -76,62 +100,162 @@ fn create_v1_router() -> Router<AppState> {
) )
.route("/configs/test", post(test_data_source_config)) .route("/configs/test", post(test_data_source_config))
.route("/configs/llm/test", post(test_llm_config)) .route("/configs/llm/test", post(test_llm_config))
// --- New Discover Routes ---
.route("/discover-models/{provider_id}", get(discover_models)) .route("/discover-models/{provider_id}", get(discover_models))
.route("/discover-models", post(discover_models_preview)) .route("/discover-models", post(discover_models_preview))
.route("/registry/register", post(registry::register_service))
.route("/registry/heartbeat", post(registry::heartbeat))
.route("/registry/deregister", post(registry::deregister_service))
} }
// --- Health & Stateless Tasks --- // --- Helper Functions ---
async fn health_check(State(state): State<AppState>) -> Json<HealthStatus> {
let mut details = HashMap::new();
// 提供确定性且无副作用的健康详情,避免访问不存在的状态字段
details.insert("message_bus".to_string(), "nats".to_string());
details.insert("nats_addr".to_string(), state.config.nats_addr.clone());
let status = HealthStatus { fn infer_market(symbol: &str) -> String {
module_id: "api-gateway".to_string(), if symbol.ends_with(".SS") || symbol.ends_with(".SH") {
status: ServiceStatus::Ok, "CN".to_string()
version: env!("CARGO_PKG_VERSION").to_string(), } else if symbol.ends_with(".HK") {
details, "HK".to_string()
} else {
"US".to_string()
}
}
// --- New Workflow Handlers ---
/// [POST /v1/tools/resolve-symbol]
/// Resolves and normalizes a symbol without starting a workflow.
async fn resolve_symbol(
Json(payload): Json<SymbolResolveRequest>,
) -> Result<impl IntoResponse> {
let market = if let Some(m) = payload.market {
if m.is_empty() {
infer_market(&payload.symbol)
} else {
m
}
} else {
infer_market(&payload.symbol)
}; };
Json(status)
} let market_enum = Market::from(market.as_str());
async fn get_current_tasks() -> Json<Vec<TaskProgress>> { let normalized_symbol = CanonicalSymbol::new(&payload.symbol, &market_enum);
Json(vec![])
Ok(Json(SymbolResolveResponse {
symbol: normalized_symbol.into(),
market,
}))
} }
// --- V1 API Handlers --- /// [POST /v1/workflow/start]
/// Initiates a new analysis workflow via the Orchestrator.
/// [POST /v1/data-requests] async fn start_workflow(
/// Triggers the data fetching process by publishing a command to the message bus.
async fn trigger_data_fetch(
State(state): State<AppState>, State(state): State<AppState>,
Json(payload): Json<DataRequest>, Json(payload): Json<DataRequest>,
) -> Result<impl IntoResponse> { ) -> Result<impl IntoResponse> {
let request_id = Uuid::new_v4(); let request_id = Uuid::new_v4();
let command = FetchCompanyDataCommand {
let market = if let Some(m) = payload.market {
if m.is_empty() {
infer_market(&payload.symbol)
} else {
m
}
} else {
infer_market(&payload.symbol)
};
let market_enum = Market::from(market.as_str());
let normalized_symbol = CanonicalSymbol::new(&payload.symbol, &market_enum);
let command = StartWorkflowCommand {
request_id, request_id,
symbol: payload.symbol.clone(), symbol: normalized_symbol.clone(),
market: payload.market, market: market.clone(),
template_id: payload.template_id.clone(), template_id: payload.template_id,
}; };
info!(request_id = %request_id, "Publishing data fetch command"); info!(request_id = %request_id, "Publishing StartWorkflowCommand to Orchestrator");
state state.nats_client
.nats_client
.publish( .publish(
DATA_FETCH_QUEUE.to_string(), command.subject().to_string(),
serde_json::to_vec(&command).unwrap().into(), serde_json::to_vec(&command).unwrap().into(),
) )
.await?; .await?;
Ok(( Ok((
StatusCode::ACCEPTED, StatusCode::ACCEPTED,
Json(RequestAcceptedResponse { request_id }), Json(RequestAcceptedResponse {
request_id,
symbol: normalized_symbol.into(),
market
}),
)) ))
} }
/// [GET /v1/workflow/events/:request_id]
/// SSE endpoint that proxies events from NATS to the frontend.
///
/// Flow:
/// 1. Ask the Orchestrator for a state snapshot so a (re)connecting client
///    catches up immediately.
/// 2. Subscribe to the per-request NATS progress subject.
/// 3. Re-emit each NATS message as an SSE event, with default keep-alives.
async fn workflow_events_stream(
    State(state): State<AppState>,
    Path(request_id): Path<Uuid>,
) -> Result<impl IntoResponse> {
    info!("Client connected to event stream for {}", request_id);

    // 1. Send SyncStateCommand to ask Orchestrator for a snapshot.
    // Failure here is non-fatal: the live subscription below still works,
    // the client just misses the initial snapshot.
    let sync_cmd = SyncStateCommand { request_id };
    if let Err(e) = state.nats_client
        .publish(sync_cmd.subject().to_string(), serde_json::to_vec(&sync_cmd).unwrap().into())
        .await
    {
        error!("Failed to send SyncStateCommand: {}", e);
    }

    // 2. Subscribe to the NATS topic carrying this request's progress events.
    let topic = NatsSubject::WorkflowProgress(request_id).to_string();
    let mut subscriber = state.nats_client.subscribe(topic).await?;

    // 3. Convert the NATS stream into an SSE stream.
    let stream = async_stream::stream! {
        while let Some(msg) = subscriber.next().await {
            match serde_json::from_slice::<WorkflowEvent>(&msg.payload) {
                Ok(event) => match axum::response::sse::Event::default().json_data(event) {
                    Ok(sse_event) => yield Ok::<_, anyhow::Error>(sse_event),
                    Err(e) => error!("Failed to serialize SSE event: {}", e),
                },
                // Previously malformed payloads were dropped silently; log them
                // so producer/schema mismatches are visible during debugging.
                Err(e) => warn!("Dropping malformed workflow event payload: {}", e),
            }
        }
    };

    Ok(axum::response::Sse::new(stream)
        .keep_alive(axum::response::sse::KeepAlive::default()))
}
// --- Legacy Handler (Renamed) ---
/// Legacy entry point kept for backward compatibility with older clients.
/// Delegates directly to `start_workflow`, so the legacy route now follows
/// the exact same path as POST /v1/workflow/start.
async fn trigger_data_fetch_legacy(
    State(state): State<AppState>,
    Json(payload): Json<DataRequest>,
) -> Result<impl IntoResponse> {
    // Forward the request unchanged; this gradually migrates callers onto
    // the Orchestrator-driven workflow without breaking the old endpoint.
    start_workflow(State(state), Json(payload)).await
}
/// Liveness probe: always answers 200 with a plain "OK" body.
/// No dependencies are checked here — this only proves the process is up.
async fn health_check() -> impl IntoResponse {
    let body = "OK";
    (StatusCode::OK, body)
}
/// Proxy for per-request session data.
///
/// Previously a 501 stub; `PersistenceClient::get_session_data` now exists,
/// so forward the lookup to the persistence service and return its payload.
/// Transport/HTTP errors from the downstream call propagate via `AppError`
/// (reqwest errors map to 502 through `ServiceRequest`).
async fn proxy_get_session_data(
    State(state): State<AppState>,
    Path(request_id): Path<Uuid>,
) -> Result<impl IntoResponse> {
    let data = state.persistence_client.get_session_data(request_id).await?;
    // NOTE(review): assumes SessionDataDto is Serialize (it round-trips as
    // JSON in PersistenceClient) — confirm against common_contracts.
    Ok((StatusCode::OK, Json(serde_json::json!(data))))
}
async fn proxy_analysis_stream(
State(_state): State<AppState>,
) -> Result<impl IntoResponse> {
Ok((StatusCode::NOT_IMPLEMENTED, Json(serde_json::json!({"error": "Not implemented"}))))
}
/// [POST /v1/analysis-requests/:symbol] /// [POST /v1/analysis-requests/:symbol]
/// Triggers the analysis report generation workflow by publishing a command. /// Triggers the analysis report generation workflow by publishing a command.
async fn trigger_analysis_generation( async fn trigger_analysis_generation(
@ -140,9 +264,19 @@ async fn trigger_analysis_generation(
Json(payload): Json<AnalysisRequest>, Json(payload): Json<AnalysisRequest>,
) -> Result<impl IntoResponse> { ) -> Result<impl IntoResponse> {
let request_id = Uuid::new_v4(); let request_id = Uuid::new_v4();
// Try to infer market to help normalization, defaulting to US if unclear but keeping original behavior safe
let market_str = infer_market(&symbol);
let market_enum = Market::from(market_str.as_str());
let normalized_symbol = CanonicalSymbol::new(&symbol, &market_enum);
if normalized_symbol.as_str() != symbol {
info!("Normalized analysis request symbol '{}' to '{}'", symbol, normalized_symbol);
}
let command = GenerateReportCommand { let command = GenerateReportCommand {
request_id, request_id,
symbol, symbol: normalized_symbol.clone(),
template_id: payload.template_id, template_id: payload.template_id,
}; };
@ -151,14 +285,21 @@ async fn trigger_analysis_generation(
state state
.nats_client .nats_client
.publish( .publish(
ANALYSIS_COMMANDS_QUEUE.to_string(), command.subject().to_string(),
serde_json::to_vec(&command).unwrap().into(), serde_json::to_vec(&command).unwrap().into(),
) )
.await?; .await?;
// Infer market for response consistency
let market = infer_market(normalized_symbol.as_str());
Ok(( Ok((
StatusCode::ACCEPTED, StatusCode::ACCEPTED,
Json(RequestAcceptedResponse { request_id }), Json(RequestAcceptedResponse {
request_id,
symbol: normalized_symbol.into(),
market
}),
)) ))
} }
@ -197,25 +338,42 @@ async fn get_task_progress(
Path(request_id): Path<Uuid>, Path(request_id): Path<Uuid>,
) -> Result<impl IntoResponse> { ) -> Result<impl IntoResponse> {
let client = reqwest::Client::new(); let client = reqwest::Client::new();
let fetches = state let services = state.get_all_services();
.config
.provider_services let fetches = services
.iter() .iter()
.map(|service_url| { .map(|(service_id, service_url)| {
let client = client.clone(); let client = client.clone();
let url = format!("{}/tasks", service_url); let url = format!("{}/tasks", service_url);
let service_id_clone = service_id.clone();
async move { async move {
match client.get(&url).send().await { match client.get(&url).send().await {
Ok(resp) => match resp.json::<Vec<TaskProgress>>().await { Ok(resp) => match resp.json::<Vec<TaskProgress>>().await {
Ok(tasks) => Some(tasks), Ok(tasks) => Some(tasks),
Err(e) => { Err(e) => {
warn!("Failed to decode tasks from {}: {}", url, e); warn!("Failed to decode tasks from {}: {}", url, e);
None // Return a synthetic error task for this provider
Some(vec![TaskProgress {
request_id,
task_name: format!("{}:unreachable", service_id_clone),
status: TaskStatus::Failed,
progress_percent: 0,
details: "Invalid response format".to_string(),
started_at: chrono::Utc::now(),
}])
} }
}, },
Err(e) => { Err(e) => {
warn!("Failed to fetch tasks from {}: {}", url, e); warn!("Failed to fetch tasks from {}: {}", url, e);
None // Return a synthetic error task for this provider
Some(vec![TaskProgress {
request_id,
task_name: format!("{}:unreachable", service_id_clone),
status: TaskStatus::Failed,
progress_percent: 0,
details: format!("Connection Error: {}", e),
started_at: chrono::Utc::now(),
}])
} }
} }
} }
@ -229,11 +387,21 @@ async fn get_task_progress(
} }
} }
if let Some(task) = merged.into_iter().find(|t| t.request_id == request_id) { let tasks_for_req: Vec<TaskProgress> = merged.into_iter()
Ok((StatusCode::OK, Json(task)).into_response()) .filter(|t| t.request_id == request_id)
} else { .collect();
Ok((StatusCode::NOT_FOUND, Json(serde_json::json!({"error": "Task not found"}))).into_response())
if tasks_for_req.is_empty() {
// Instead of returning 404, we should probably return an empty list if we have checked everyone
// But if we really found nothing (even synthetic errors), then 404 is fine.
// With synthetic errors, this should rarely happen unless no providers are registered.
if services.is_empty() {
warn!("No providers registered to query for tasks.");
} }
return Ok((StatusCode::NOT_FOUND, Json(serde_json::json!({"error": "Task not found"}))).into_response());
}
Ok(Json(tasks_for_req).into_response())
} }
@ -254,17 +422,8 @@ async fn test_data_source_config(
) -> Result<impl IntoResponse> { ) -> Result<impl IntoResponse> {
info!("test_data_source_config: type={}", payload.r#type); info!("test_data_source_config: type={}", payload.r#type);
let target_service_url = match payload.r#type.as_str() { // Dynamic discovery
"tushare" => state.config.provider_services.iter().find(|s| s.contains("tushare")), let target_service_url = state.get_service_url(&payload.r#type);
"finnhub" => state.config.provider_services.iter().find(|s| s.contains("finnhub")),
"alphavantage" => state.config.provider_services.iter().find(|s| s.contains("alphavantage")),
_ => {
return Ok((
StatusCode::BAD_REQUEST,
Json(serde_json::json!({ "error": "Unsupported config type" })),
).into_response());
}
};
if let Some(base_url) = target_service_url { if let Some(base_url) = target_service_url {
let client = reqwest::Client::new(); let client = reqwest::Client::new();
@ -293,10 +452,10 @@ async fn test_data_source_config(
let response_json: serde_json::Value = response.json().await?; let response_json: serde_json::Value = response.json().await?;
Ok((StatusCode::OK, Json(response_json)).into_response()) Ok((StatusCode::OK, Json(response_json)).into_response())
} else { } else {
warn!("No downstream service found for config type: {}", payload.r#type); warn!("No downstream service registered for config type: {}", payload.r#type);
Ok(( Ok((
StatusCode::NOT_IMPLEMENTED, StatusCode::NOT_IMPLEMENTED,
Json(serde_json::json!({ "error": "No downstream service configured for this type" })), Json(serde_json::json!({ "error": "No downstream service registered for this type" })),
).into_response()) ).into_response())
} }
} }

View File

@ -0,0 +1,64 @@
use axum::{
extract::State,
http::StatusCode,
response::IntoResponse,
Json,
};
use common_contracts::registry::{Heartbeat, ServiceRegistration};
use std::time::Instant;
use tracing::{info, warn};
use crate::{error::Result, state::{AppState, RegistryEntry}};
/// [POST /v1/registry/register]
/// Inserts (or replaces) a service entry in the in-memory registry and
/// stamps the current instant as its first heartbeat.
pub async fn register_service(
    State(state): State<AppState>,
    Json(payload): Json<ServiceRegistration>,
) -> Result<impl IntoResponse> {
    info!("Registering service: {} ({}) at {}", payload.service_id, payload.service_name, payload.base_url);

    let service_id = payload.service_id.clone();
    let entry = RegistryEntry {
        registration: payload,
        last_heartbeat: Instant::now(),
    };

    // The write lock is held only for the insert; no await occurs under it.
    state.registry.write().unwrap().insert(service_id, entry);

    Ok(StatusCode::OK)
}
/// [POST /v1/registry/heartbeat]
/// Refreshes the liveness timestamp for a known service. Unknown services
/// receive 404 so they know to re-register (the self-healing path).
pub async fn heartbeat(
    State(state): State<AppState>,
    Json(payload): Json<Heartbeat>,
) -> Result<impl IntoResponse> {
    let mut registry = state.registry.write().unwrap();
    match registry.get_mut(&payload.service_id) {
        Some(entry) => {
            entry.last_heartbeat = Instant::now();
            Ok(StatusCode::OK)
        }
        None => {
            // Answering 404 tells the provider to re-register itself.
            warn!("Received heartbeat from unknown service: {}", payload.service_id);
            Ok(StatusCode::NOT_FOUND)
        }
    }
}
/// [POST /v1/registry/deregister]
pub async fn deregister_service(
State(state): State<AppState>,
Json(payload): Json<serde_json::Value>,
) -> Result<impl IntoResponse> {
let service_id = payload.get("service_id")
.and_then(|v| v.as_str())
.ok_or_else(|| crate::error::AppError::BadRequest("Missing service_id".into()))?;
info!("Deregistering service: {}", service_id);
let mut registry = state.registry.write().unwrap();
registry.remove(service_id);
Ok(StatusCode::OK)
}

View File

@ -7,7 +7,6 @@ pub struct AppConfig {
pub nats_addr: String, pub nats_addr: String,
pub data_persistence_service_url: String, pub data_persistence_service_url: String,
pub report_generator_service_url: String, pub report_generator_service_url: String,
pub provider_services: Vec<String>,
} }
impl AppConfig { impl AppConfig {
@ -22,34 +21,11 @@ impl AppConfig {
let report_generator_service_url: String = cfg.get::<String>("report_generator_service_url") let report_generator_service_url: String = cfg.get::<String>("report_generator_service_url")
.unwrap_or_else(|_| "http://report-generator-service:8004".to_string()); .unwrap_or_else(|_| "http://report-generator-service:8004".to_string());
// Parse provider_services deterministically:
// 1) prefer array from env (e.g., PROVIDER_SERVICES__0, PROVIDER_SERVICES__1, ...)
// 2) fallback to explicit JSON in PROVIDER_SERVICES
let provider_services: Vec<String> = if let Ok(arr) = cfg.get_array("provider_services") {
let mut out: Vec<String> = Vec::with_capacity(arr.len());
for v in arr {
let s = v.into_string().map_err(|e| {
config::ConfigError::Message(format!("provider_services must be strings: {}", e))
})?;
out.push(s);
}
out
} else {
let json = cfg.get_string("provider_services")?;
serde_json::from_str::<Vec<String>>(&json).map_err(|e| {
config::ConfigError::Message(format!(
"Invalid JSON for provider_services: {}",
e
))
})?
};
Ok(Self { Ok(Self {
server_port, server_port,
nats_addr, nats_addr,
data_persistence_service_url, data_persistence_service_url,
report_generator_service_url, report_generator_service_url,
provider_services,
}) })
} }
} }

View File

@ -24,8 +24,11 @@ pub enum AppError {
#[error("HTTP request to another service failed: {0}")] #[error("HTTP request to another service failed: {0}")]
ServiceRequest(#[from] reqwest::Error), ServiceRequest(#[from] reqwest::Error),
#[error("An unexpected error occurred.")] #[error("Bad request: {0}")]
Anyhow(#[from] anyhow::Error), BadRequest(String),
#[error("Internal error: {0}")]
Internal(#[from] anyhow::Error),
} }
impl IntoResponse for AppError { impl IntoResponse for AppError {
@ -37,7 +40,8 @@ impl IntoResponse for AppError {
AppError::MessageBusSubscribe(msg) => (StatusCode::SERVICE_UNAVAILABLE, msg.clone()), AppError::MessageBusSubscribe(msg) => (StatusCode::SERVICE_UNAVAILABLE, msg.clone()),
AppError::MessageBusConnect(msg) => (StatusCode::SERVICE_UNAVAILABLE, msg.clone()), AppError::MessageBusConnect(msg) => (StatusCode::SERVICE_UNAVAILABLE, msg.clone()),
AppError::ServiceRequest(err) => (StatusCode::BAD_GATEWAY, err.to_string()), AppError::ServiceRequest(err) => (StatusCode::BAD_GATEWAY, err.to_string()),
AppError::Anyhow(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()), AppError::BadRequest(msg) => (StatusCode::BAD_REQUEST, msg.clone()),
AppError::Internal(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()),
}; };
let body = Json(json!({ "error": message })); let body = Json(json!({ "error": message }));
(status, body).into_response() (status, body).into_response()

View File

@ -29,11 +29,10 @@ async fn main() {
eprintln!("api-gateway launching: pid={}, ts_unix={}", process::id(), ts); eprintln!("api-gateway launching: pid={}, ts_unix={}", process::id(), ts);
// Print critical environment variables relevant to configuration (no secrets) // Print critical environment variables relevant to configuration (no secrets)
eprintln!( eprintln!(
"env: SERVER_PORT={:?}, NATS_ADDR={:?}, DATA_PERSISTENCE_SERVICE_URL={:?}, PROVIDER_SERVICES.len={}", "env: SERVER_PORT={:?}, NATS_ADDR={:?}, DATA_PERSISTENCE_SERVICE_URL={:?}",
std::env::var("SERVER_PORT").ok(), std::env::var("SERVER_PORT").ok(),
std::env::var("NATS_ADDR").ok(), std::env::var("NATS_ADDR").ok(),
std::env::var("DATA_PERSISTENCE_SERVICE_URL").ok(), std::env::var("DATA_PERSISTENCE_SERVICE_URL").ok(),
std::env::var("PROVIDER_SERVICES").ok().map(|s| s.len()).unwrap_or(0),
); );
let _ = io::stderr().flush(); let _ = io::stderr().flush();
@ -67,7 +66,7 @@ async fn run() -> Result<()> {
persistence_url = %config.data_persistence_service_url, persistence_url = %config.data_persistence_service_url,
"Loaded configuration" "Loaded configuration"
); );
info!("Configured provider services: {:?}", config.provider_services); // info!("Configured provider services: {:?}", config.provider_services); // Removed in favor of dynamic registry
// Initialize application state // Initialize application state
let app_state = AppState::new(config).await?; let app_state = AppState::new(config).await?;
@ -83,13 +82,13 @@ async fn run() -> Result<()> {
Ok(l) => l, Ok(l) => l,
Err(e) => { Err(e) => {
error!(%addr, err = %e, "Failed to bind TCP listener"); error!(%addr, err = %e, "Failed to bind TCP listener");
return Err(error::AppError::Anyhow(anyhow::anyhow!(e))); return Err(error::AppError::Internal(anyhow::anyhow!(e)));
} }
}; };
info!("HTTP server listening on port {}", port); info!("HTTP server listening on port {}", port);
if let Err(e) = axum::serve(listener, app).await { if let Err(e) = axum::serve(listener, app).await {
error!(err = %e, "HTTP server terminated with error"); error!(err = %e, "HTTP server terminated with error");
return Err(error::AppError::Anyhow(anyhow::anyhow!(e))); return Err(error::AppError::Internal(anyhow::anyhow!(e)));
} }
Ok(()) Ok(())

View File

@ -46,6 +46,19 @@ impl PersistenceClient {
Ok(financials) Ok(financials)
} }
/// Fetches all session-scoped data rows for one workflow request from the
/// persistence service (GET {base_url}/session-data/{request_id}).
/// Any non-2xx downstream status is surfaced as an error.
pub async fn get_session_data(&self, request_id: uuid::Uuid) -> Result<Vec<common_contracts::dtos::SessionDataDto>> {
    let url = format!("{}/session-data/{}", self.base_url, request_id);
    let response = self.client.get(&url).send().await?.error_for_status()?;
    let data = response
        .json::<Vec<common_contracts::dtos::SessionDataDto>>()
        .await?;
    Ok(data)
}
pub async fn get_analysis_results(&self, symbol: &str) -> Result<Vec<common_contracts::dtos::AnalysisResultDto>> { pub async fn get_analysis_results(&self, symbol: &str) -> Result<Vec<common_contracts::dtos::AnalysisResultDto>> {
let url = format!("{}/analysis-results?symbol={}", self.base_url, symbol); let url = format!("{}/analysis-results?symbol={}", self.base_url, symbol);
let results = self let results = self

View File

@ -1,16 +1,26 @@
use crate::config::AppConfig; use crate::config::AppConfig;
use crate::error::Result; use crate::error::Result;
use crate::persistence::PersistenceClient; use crate::persistence::PersistenceClient;
use std::sync::Arc; use std::sync::{Arc, RwLock};
use async_nats::Client as NatsClient; use async_nats::Client as NatsClient;
use tokio::time::{sleep, Duration}; use tokio::time::{sleep, Duration};
use tracing::{info, warn}; use tracing::{info, warn};
use std::collections::HashMap;
use std::time::Instant;
use common_contracts::registry::{ServiceRegistration, ServiceRole};
#[derive(Clone, Debug)]
pub struct RegistryEntry {
    /// The service's self-reported registration (id, name, role, base_url).
    pub registration: ServiceRegistration,
    /// When the most recent heartbeat (or the initial registration) was seen.
    /// Stored as a monotonic `Instant`, so it is not serializable wall-clock time.
    pub last_heartbeat: Instant,
}
#[derive(Clone)] #[derive(Clone)]
pub struct AppState { pub struct AppState {
pub config: Arc<AppConfig>, pub config: Arc<AppConfig>,
pub nats_client: NatsClient, pub nats_client: NatsClient,
pub persistence_client: PersistenceClient, pub persistence_client: PersistenceClient,
pub registry: Arc<RwLock<HashMap<String, RegistryEntry>>>,
} }
impl AppState { impl AppState {
@ -24,8 +34,38 @@ impl AppState {
config: Arc::new(config), config: Arc::new(config),
nats_client, nats_client,
persistence_client, persistence_client,
registry: Arc::new(RwLock::new(HashMap::new())),
}) })
} }
/// Looks up a registered service by its name (e.g., "tushare") and returns
/// its base URL, or `None` when no instance is registered under that name.
pub fn get_service_url(&self, service_name: &str) -> Option<String> {
    // NOTE(review): no health filtering yet — the first matching entry wins.
    // TODO: round-robin or last-heartbeat-based selection.
    let registry = self.registry.read().unwrap();
    for entry in registry.values() {
        if entry.registration.service_name == service_name {
            return Some(entry.registration.base_url.clone());
        }
    }
    None
}
/// Returns a snapshot of every registered service as
/// `(service_id, base_url)` pairs. Order is unspecified (map iteration).
pub fn get_all_services(&self) -> Vec<(String, String)> {
    let registry = self.registry.read().unwrap();
    let mut services = Vec::with_capacity(registry.len());
    for entry in registry.values() {
        let reg = &entry.registration;
        services.push((reg.service_id.clone(), reg.base_url.clone()));
    }
    services
}
/// Counts registered services whose role is `ServiceRole::DataProvider`.
/// Used to decide whether any providers are available to fan out to.
pub fn get_provider_count(&self) -> usize {
    let registry = self.registry.read().unwrap();
    let mut count = 0;
    for entry in registry.values() {
        // Strict type checking via the ServiceRole enum, not string matching.
        if entry.registration.role == ServiceRole::DataProvider {
            count += 1;
        }
    }
    count
}
} }
const MAX_NATS_CONNECT_ATTEMPTS: usize = 30; const MAX_NATS_CONNECT_ATTEMPTS: usize = 30;

View File

@ -37,6 +37,12 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "anyhow"
version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]] [[package]]
name = "arrayvec" name = "arrayvec"
version = "0.7.6" version = "0.7.6"
@ -52,6 +58,12 @@ dependencies = [
"num-traits", "num-traits",
] ]
[[package]]
name = "atomic-waker"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.5.0" version = "1.5.0"
@ -248,12 +260,17 @@ dependencies = [
name = "common-contracts" name = "common-contracts"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow",
"chrono", "chrono",
"log",
"reqwest",
"rust_decimal", "rust_decimal",
"serde", "serde",
"serde_json", "serde_json",
"service_kit", "service_kit",
"sqlx", "sqlx",
"tokio",
"tracing",
"utoipa", "utoipa",
"uuid", "uuid",
] ]
@ -273,6 +290,16 @@ version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8"
[[package]]
name = "core-foundation"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "core-foundation-sys" name = "core-foundation-sys"
version = "0.8.7" version = "0.8.7"
@ -383,12 +410,31 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "encoding_rs"
version = "0.8.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
dependencies = [
"cfg-if",
]
[[package]] [[package]]
name = "equivalent" name = "equivalent"
version = "1.0.2" version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
name = "errno"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "etcetera" name = "etcetera"
version = "0.8.0" version = "0.8.0"
@ -411,6 +457,12 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "fastrand"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]] [[package]]
name = "find-msvc-tools" name = "find-msvc-tools"
version = "0.1.5" version = "0.1.5"
@ -440,6 +492,21 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "foreign-types"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
dependencies = [
"foreign-types-shared",
]
[[package]]
name = "foreign-types-shared"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
[[package]] [[package]]
name = "form_urlencoded" name = "form_urlencoded"
version = "1.2.2" version = "1.2.2"
@ -560,6 +627,25 @@ dependencies = [
"wasip2", "wasip2",
] ]
[[package]]
name = "h2"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386"
dependencies = [
"atomic-waker",
"bytes",
"fnv",
"futures-core",
"futures-sink",
"http",
"indexmap",
"slab",
"tokio",
"tokio-util",
"tracing",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.12.3" version = "0.12.3"
@ -668,6 +754,92 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "httparse"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
[[package]]
name = "hyper"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
dependencies = [
"atomic-waker",
"bytes",
"futures-channel",
"futures-core",
"h2",
"http",
"http-body",
"httparse",
"itoa",
"pin-project-lite",
"pin-utils",
"smallvec",
"tokio",
"want",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
dependencies = [
"http",
"hyper",
"hyper-util",
"rustls",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
dependencies = [
"bytes",
"http-body-util",
"hyper",
"hyper-util",
"native-tls",
"tokio",
"tokio-native-tls",
"tower-service",
]
[[package]]
name = "hyper-util"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56"
dependencies = [
"base64",
"bytes",
"futures-channel",
"futures-core",
"futures-util",
"http",
"http-body",
"hyper",
"ipnet",
"libc",
"percent-encoding",
"pin-project-lite",
"socket2",
"system-configuration",
"tokio",
"tower-service",
"tracing",
"windows-registry",
]
[[package]] [[package]]
name = "iana-time-zone" name = "iana-time-zone"
version = "0.1.64" version = "0.1.64"
@ -815,6 +987,22 @@ dependencies = [
"rustversion", "rustversion",
] ]
[[package]]
name = "ipnet"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
[[package]]
name = "iri-string"
version = "0.7.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397"
dependencies = [
"memchr",
"serde",
]
[[package]] [[package]]
name = "itoa" name = "itoa"
version = "1.0.15" version = "1.0.15"
@ -873,6 +1061,12 @@ dependencies = [
"vcpkg", "vcpkg",
] ]
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"
[[package]] [[package]]
name = "litemap" name = "litemap"
version = "0.8.1" version = "0.8.1"
@ -933,6 +1127,23 @@ dependencies = [
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "native-tls"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
dependencies = [
"libc",
"log",
"openssl",
"openssl-probe",
"openssl-sys",
"schannel",
"security-framework",
"security-framework-sys",
"tempfile",
]
[[package]] [[package]]
name = "num-bigint-dig" name = "num-bigint-dig"
version = "0.8.6" version = "0.8.6"
@ -985,6 +1196,50 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "openssl"
version = "0.10.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328"
dependencies = [
"bitflags",
"cfg-if",
"foreign-types",
"libc",
"once_cell",
"openssl-macros",
"openssl-sys",
]
[[package]]
name = "openssl-macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]]
name = "openssl-probe"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
[[package]]
name = "openssl-sys"
version = "0.9.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321"
dependencies = [
"cc",
"libc",
"pkg-config",
"vcpkg",
]
[[package]] [[package]]
name = "parking" name = "parking"
version = "2.2.1" version = "2.2.1"
@ -1242,6 +1497,46 @@ dependencies = [
"bytecheck", "bytecheck",
] ]
[[package]]
name = "reqwest"
version = "0.12.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
dependencies = [
"base64",
"bytes",
"encoding_rs",
"futures-core",
"h2",
"http",
"http-body",
"http-body-util",
"hyper",
"hyper-rustls",
"hyper-tls",
"hyper-util",
"js-sys",
"log",
"mime",
"native-tls",
"percent-encoding",
"pin-project-lite",
"rustls-pki-types",
"serde",
"serde_json",
"serde_urlencoded",
"sync_wrapper",
"tokio",
"tokio-native-tls",
"tower",
"tower-http",
"tower-service",
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.17.14" version = "0.17.14"
@ -1321,6 +1616,19 @@ dependencies = [
"serde_json", "serde_json",
] ]
[[package]]
name = "rustix"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e"
dependencies = [
"bitflags",
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.23.35" version = "0.23.35"
@ -1367,6 +1675,15 @@ version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "schannel"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
dependencies = [
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "schemars" name = "schemars"
version = "1.1.0" version = "1.1.0"
@ -1405,6 +1722,29 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
[[package]]
name = "security-framework"
version = "2.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
dependencies = [
"bitflags",
"core-foundation",
"core-foundation-sys",
"libc",
"security-framework-sys",
]
[[package]]
name = "security-framework-sys"
version = "2.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "serde" name = "serde"
version = "1.0.228" version = "1.0.228"
@ -1866,6 +2206,9 @@ name = "sync_wrapper"
version = "1.0.2" version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
dependencies = [
"futures-core",
]
[[package]] [[package]]
name = "synstructure" name = "synstructure"
@ -1878,12 +2221,46 @@ dependencies = [
"syn 2.0.110", "syn 2.0.110",
] ]
[[package]]
name = "system-configuration"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
dependencies = [
"bitflags",
"core-foundation",
"system-configuration-sys",
]
[[package]]
name = "system-configuration-sys"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4"
dependencies = [
"core-foundation-sys",
"libc",
]
[[package]] [[package]]
name = "tap" name = "tap"
version = "1.0.1" version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16"
dependencies = [
"fastrand",
"getrandom 0.3.4",
"once_cell",
"rustix",
"windows-sys 0.61.2",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.17" version = "2.0.17"
@ -1940,9 +2317,41 @@ dependencies = [
"mio", "mio",
"pin-project-lite", "pin-project-lite",
"socket2", "socket2",
"tokio-macros",
"windows-sys 0.61.2", "windows-sys 0.61.2",
] ]
[[package]]
name = "tokio-macros"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.110",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
dependencies = [
"native-tls",
"tokio",
]
[[package]]
name = "tokio-rustls"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61"
dependencies = [
"rustls",
"tokio",
]
[[package]] [[package]]
name = "tokio-stream" name = "tokio-stream"
version = "0.1.17" version = "0.1.17"
@ -1954,6 +2363,19 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-util"
version = "0.7.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594"
dependencies = [
"bytes",
"futures-core",
"futures-sink",
"pin-project-lite",
"tokio",
]
[[package]] [[package]]
name = "toml" name = "toml"
version = "0.9.8" version = "0.9.8"
@ -2015,6 +2437,25 @@ dependencies = [
"futures-util", "futures-util",
"pin-project-lite", "pin-project-lite",
"sync_wrapper", "sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-http"
version = "0.6.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
"bitflags",
"bytes",
"futures-util",
"http",
"http-body",
"iri-string",
"pin-project-lite",
"tower",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
] ]
@ -2063,6 +2504,12 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "try-lock"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
[[package]] [[package]]
name = "typenum" name = "typenum"
version = "1.19.0" version = "1.19.0"
@ -2169,6 +2616,15 @@ version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "want"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
dependencies = [
"try-lock",
]
[[package]] [[package]]
name = "wasi" name = "wasi"
version = "0.11.1+wasi-snapshot-preview1" version = "0.11.1+wasi-snapshot-preview1"
@ -2203,6 +2659,19 @@ dependencies = [
"wasm-bindgen-shared", "wasm-bindgen-shared",
] ]
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
dependencies = [
"cfg-if",
"js-sys",
"once_cell",
"wasm-bindgen",
"web-sys",
]
[[package]] [[package]]
name = "wasm-bindgen-macro" name = "wasm-bindgen-macro"
version = "0.2.105" version = "0.2.105"
@ -2235,6 +2704,16 @@ dependencies = [
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "web-sys"
version = "0.3.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]] [[package]]
name = "webpki-roots" name = "webpki-roots"
version = "0.26.11" version = "0.26.11"
@ -2304,6 +2783,17 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-registry"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720"
dependencies = [
"windows-link",
"windows-result",
"windows-strings",
]
[[package]] [[package]]
name = "windows-result" name = "windows-result"
version = "0.4.1" version = "0.4.1"

View File

@ -18,5 +18,8 @@ rust_decimal = { version = "1.36", features = ["serde"] }
utoipa = { version = "5.4", features = ["chrono", "uuid"] } utoipa = { version = "5.4", features = ["chrono", "uuid"] }
sqlx = { version = "0.8.6", features = [ "runtime-tokio-rustls", "postgres", "chrono", "uuid", "json", "rust_decimal" ] } sqlx = { version = "0.8.6", features = [ "runtime-tokio-rustls", "postgres", "chrono", "uuid", "json", "rust_decimal" ] }
service_kit = { version = "0.1.2" } service_kit = { version = "0.1.2" }
reqwest = { version = "0.12", features = ["json"] }
tokio = { version = "1", features = ["time", "sync", "macros"] }
log = "0.4"
tracing = "0.1"
anyhow = "1.0"

View File

@ -91,4 +91,38 @@ pub struct RealtimeQuoteDto {
pub source: Option<String>, pub source: Option<String>,
} }
use crate::observability::TaskStatus;

/// Status record for one data provider of a symbol.
#[api_dto]
pub struct ProviderStatusDto {
    /// When this status was last written.
    pub last_updated: chrono::DateTime<chrono::Utc>,
    /// Current task status reported for the provider.
    pub status: TaskStatus,
    /// Provider-specific data version tag, when available.
    pub data_version: Option<String>,
}

// Provider Path Params
/// Path parameters for per-provider endpoints (symbol + provider id).
#[api_dto]
#[derive(utoipa::IntoParams)]
pub struct ProviderPathParams {
    pub symbol: String,
    pub provider_id: String,
}

// Session Data & Cache DTOs
/// One raw data payload captured during a workflow session.
#[api_dto]
pub struct SessionDataDto {
    /// Workflow request this payload belongs to.
    pub request_id: Uuid,
    pub symbol: String,
    /// Provider that produced the payload.
    pub provider: String,
    /// Discriminator describing what `data_payload` contains.
    pub data_type: String,
    pub data_payload: JsonValue,
    pub created_at: Option<chrono::DateTime<chrono::Utc>>,
}

/// Cached provider response with an explicit expiry instant.
#[api_dto]
pub struct ProviderCacheDto {
    pub cache_key: String,
    pub data_payload: JsonValue,
    /// Entry is considered stale after this instant.
    pub expires_at: chrono::DateTime<chrono::Utc>,
    pub updated_at: Option<chrono::DateTime<chrono::Utc>>,
}

View File

@ -3,6 +3,9 @@ pub mod models;
pub mod observability; pub mod observability;
pub mod messages; pub mod messages;
pub mod config_models; pub mod config_models;
pub mod subjects;
pub mod provider; pub mod provider;
pub mod registry;
pub mod lifecycle;
pub mod symbol_utils;
pub mod persistence_client;

View File

@ -0,0 +1,138 @@
use crate::registry::{ServiceRegistration, Heartbeat, ServiceStatus};
use reqwest::Client;
use std::time::Duration;
use tokio::time;
use log::{info, warn, error};
use std::sync::Arc;
pub struct ServiceRegistrar {
gateway_url: String,
registration: ServiceRegistration,
client: Client,
}
impl ServiceRegistrar {
pub fn new(gateway_url: String, registration: ServiceRegistration) -> Self {
Self {
gateway_url,
registration,
client: Client::new(),
}
}
/// Registers the service with the gateway.
/// It will attempt to register multiple times before giving up or returning.
/// In a real production scenario, you might want this to block until success
/// or allow the application to start and register in the background.
pub async fn register(&self) -> Result<(), reqwest::Error> {
let url = format!("{}/v1/registry/register", self.gateway_url);
let mut attempt = 0;
let max_retries = 5;
let mut delay = Duration::from_secs(2);
loop {
attempt += 1;
info!("Registering service (attempt {}/{})...", attempt, max_retries);
match self.client.post(&url)
.json(&self.registration)
.send()
.await {
Ok(resp) => {
if resp.status().is_success() {
info!("Successfully registered service: {}", self.registration.service_id);
return Ok(());
} else {
warn!("Registration failed with status: {}. Attempt: {}", resp.status(), attempt);
}
},
Err(e) => {
warn!("Registration request failed: {}. Attempt: {}", e, attempt);
}
}
if attempt >= max_retries {
// We stop blocking here. The heartbeat loop will try to recover registration if needed.
warn!("Max registration retries reached. Continuing, but service might not be discoverable yet.");
// We return Ok to allow the service to start up.
// Returning an error might cause the whole pod to crash loop which might be desired or not.
// Given the self-healing design, we can proceed.
return Ok(());
}
time::sleep(delay).await;
delay = std::cmp::min(delay * 2, Duration::from_secs(30));
}
}
/// Helper to register a single time without retries (used by recovery mechanism)
async fn register_once(&self) -> Result<(), reqwest::Error> {
let url = format!("{}/v1/registry/register", self.gateway_url);
let resp = self.client.post(&url)
.json(&self.registration)
.send()
.await?;
if !resp.status().is_success() {
return Err(resp.error_for_status().unwrap_err());
}
Ok(())
}
/// Starts the background heartbeat loop.
/// Requires `Arc<Self>` because it will be spawned into a static task.
pub async fn start_heartbeat_loop(self: Arc<Self>) {
let mut interval = time::interval(Duration::from_secs(10));
let heartbeat_url = format!("{}/v1/registry/heartbeat", self.gateway_url);
info!("Starting heartbeat loop for service: {}", self.registration.service_id);
loop {
interval.tick().await;
let heartbeat = Heartbeat {
service_id: self.registration.service_id.clone(),
status: ServiceStatus::Active,
};
match self.client.post(&heartbeat_url)
.json(&heartbeat)
.send()
.await {
Ok(resp) => {
// If the Gateway says "I don't know you" (404) or Unauthorized, we re-register.
if resp.status() == reqwest::StatusCode::NOT_FOUND || resp.status() == reqwest::StatusCode::UNAUTHORIZED {
warn!("Gateway returned {}, indicating registration loss. Re-registering...", resp.status());
if let Err(e) = self.register_once().await {
error!("Re-registration failed: {}", e);
} else {
info!("Re-registration successful.");
}
} else if !resp.status().is_success() {
warn!("Heartbeat failed with status: {}", resp.status());
}
},
Err(e) => {
error!("Heartbeat request failed: {}", e);
}
}
}
}
pub async fn deregister(&self) -> Result<(), reqwest::Error> {
let url = format!("{}/v1/registry/deregister", self.gateway_url);
info!("Deregistering service: {}", self.registration.service_id);
let payload = serde_json::json!({
"service_id": self.registration.service_id
});
let _ = self.client.post(&url)
.json(&payload)
.send()
.await?;
Ok(())
}
}

View File

@ -1,42 +1,239 @@
use serde::{Serialize, Deserialize}; use serde::{Serialize, Deserialize};
use uuid::Uuid; use uuid::Uuid;
use crate::symbol_utils::CanonicalSymbol;
use crate::subjects::{NatsSubject, SubjectMessage};
use std::collections::HashMap;
// --- Commands --- // --- Commands ---
///
// Topic: workflow.commands.start
/// Command to initiate a new workflow.
/// Published by: `api-gateway` /// Published by: `api-gateway`
/// Consumed by: `workflow-orchestrator`
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct StartWorkflowCommand {
    /// Correlation ID carried through every downstream message of this run.
    pub request_id: Uuid,
    pub symbol: CanonicalSymbol,
    pub market: String,
    /// Analysis template set to execute for this workflow.
    pub template_id: String,
}

impl SubjectMessage for StartWorkflowCommand {
    /// Published on `workflow.commands.start`.
    fn subject(&self) -> NatsSubject {
        NatsSubject::WorkflowCommandStart
    }
}
// Topic: workflow.commands.sync_state
/// Command to request a state snapshot for re-alignment.
/// Published by: `api-gateway` (on client connect/reconnect)
/// Consumed by: `workflow-orchestrator`
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SyncStateCommand {
    /// Workflow to snapshot; the orchestrator replies with a
    /// `WorkflowEvent::WorkflowStateSnapshot` on the request's event subject.
    pub request_id: Uuid,
}

impl SubjectMessage for SyncStateCommand {
    /// Published on `workflow.commands.sync_state`.
    fn subject(&self) -> NatsSubject {
        NatsSubject::WorkflowCommandSyncState
    }
}
/// Command to trigger data fetching.
/// Published by: `workflow-orchestrator` (previously api-gateway)
/// Consumed by: `*-provider-services` /// Consumed by: `*-provider-services`
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FetchCompanyDataCommand { pub struct FetchCompanyDataCommand {
pub request_id: Uuid, pub request_id: Uuid,
pub symbol: String, pub symbol: CanonicalSymbol,
pub market: String, pub market: String,
pub template_id: Option<String>, // Optional trigger for analysis pub template_id: Option<String>, // Optional trigger for analysis
} }
impl SubjectMessage for FetchCompanyDataCommand {
fn subject(&self) -> NatsSubject {
NatsSubject::DataFetchCommands
}
}
/// Command to start a full report generation workflow. /// Command to start a full report generation workflow.
/// /// Published by: `workflow-orchestrator` (previously api-gateway)
/// Published by: `api-gateway`
/// Consumed by: `report-generator-service` /// Consumed by: `report-generator-service`
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GenerateReportCommand { pub struct GenerateReportCommand {
pub request_id: Uuid, pub request_id: Uuid,
pub symbol: String, pub symbol: CanonicalSymbol,
pub template_id: String, pub template_id: String,
} }
impl SubjectMessage for GenerateReportCommand {
fn subject(&self) -> NatsSubject {
NatsSubject::AnalysisCommandGenerateReport
}
}
// --- Events --- // --- Events ---
// Topic: events.workflow.{request_id}
/// Unified event stream for frontend consumption.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(tag = "type", content = "payload")]
pub enum WorkflowEvent {
    // 1. Workflow initialization (carries the complete task dependency graph)
    WorkflowStarted {
        timestamp: i64,
        // Defines every task and its dependencies; the frontend can render a
        // flow chart or progress bar from this graph.
        task_graph: WorkflowDag
    },

    // 2. Task state transition (the core event)
    TaskStateChanged {
        task_id: String, // e.g., "fetch:tushare", "process:clean_financials", "module:swot_analysis"
        task_type: TaskType, // DataFetch | DataProcessing | Analysis
        status: TaskStatus, // Pending, Scheduled, Running, Completed, Failed, Skipped
        message: Option<String>,
        timestamp: i64
    },

    // 3. Streaming task output (for the LLM "typewriter" effect)
    TaskStreamUpdate {
        task_id: String,
        content_delta: String,
        index: u32
    },

    // 4. Overall workflow completion
    WorkflowCompleted {
        result_summary: serde_json::Value,
        end_timestamp: i64
    },

    WorkflowFailed {
        reason: String,
        is_fatal: bool,
        end_timestamp: i64
    },

    // 5. State snapshot (for reconnect / lost-message recovery)
    // Sent by the Orchestrator when the frontend reconnects or explicitly
    // sends a SyncStateCommand.
    WorkflowStateSnapshot {
        timestamp: i64,
        task_graph: WorkflowDag,
        tasks_status: HashMap<String, TaskStatus>, // latest status of every task
        tasks_output: HashMap<String, Option<String>> // (optional) key output summaries of completed tasks
    }
}
/// Task dependency graph shipped to the frontend at workflow start.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct WorkflowDag {
    pub nodes: Vec<TaskNode>,
    pub edges: Vec<TaskDependency> // from -> to
}

/// One directed edge (`from` -> `to`) in the workflow DAG.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskDependency {
    pub from: String,
    pub to: String,
}

/// One node (task) in the workflow DAG.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskNode {
    pub id: String,
    pub name: String,
    pub r#type: TaskType,
    pub initial_status: TaskStatus
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy)]
pub enum TaskType {
    DataFetch, // produces raw context
    DataProcessing, // consumes and transforms context (New)
    Analysis // reads context and generates new content
}

#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Copy)]
pub enum TaskStatus {
    Pending, // waiting on dependencies
    Scheduled, // dependencies satisfied, dispatched to a worker
    Running, // worker is executing
    Completed, // finished successfully
    Failed, // execution failed
    Skipped // skipped due to upstream failure or policy
}
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct CompanyProfilePersistedEvent { pub struct CompanyProfilePersistedEvent {
pub request_id: Uuid, pub request_id: Uuid,
pub symbol: String, pub symbol: CanonicalSymbol,
} }
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FinancialsPersistedEvent { pub struct FinancialsPersistedEvent {
pub request_id: Uuid, pub request_id: Uuid,
pub symbol: String, pub symbol: CanonicalSymbol,
pub years_updated: Vec<u16>, pub years_updated: Vec<u16>,
pub template_id: Option<String>, // Pass-through for analysis trigger pub template_id: Option<String>, // Pass-through for analysis trigger
// Identity fix: Mandatory provider ID
#[serde(default)]
pub provider_id: Option<String>,
// Output pass-through: Optional data preview/summary
#[serde(default)]
pub data_summary: Option<String>,
}
impl SubjectMessage for FinancialsPersistedEvent {
    /// Published on `events.data.financials_persisted`.
    fn subject(&self) -> NatsSubject {
        NatsSubject::DataFinancialsPersisted
    }
}
/// Event signalling that a data fetch for a symbol failed.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DataFetchFailedEvent {
    pub request_id: Uuid,
    pub symbol: CanonicalSymbol,
    /// Human-readable failure description.
    pub error: String,
    // Identity fix: Mandatory provider ID.
    // Kept as Option + serde(default) so older payloads without the field
    // still deserialize.
    #[serde(default)]
    pub provider_id: Option<String>,
}

impl SubjectMessage for DataFetchFailedEvent {
    /// Published on `events.data.fetch_failed`.
    fn subject(&self) -> NatsSubject {
        NatsSubject::DataFetchFailed
    }
}
// Topic: events.analysis.report_generated
/// Event emitted when a report generation task (or sub-module) is completed.
/// Consumed by: `workflow-orchestrator`
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReportGeneratedEvent {
    pub request_id: Uuid,
    pub symbol: CanonicalSymbol,
    pub module_id: String, // Which part of the analysis finished
    pub content_snapshot: Option<String>, // Optional short preview
    /// Identifier of the model that produced the content, when known.
    pub model_id: Option<String>,
}

impl SubjectMessage for ReportGeneratedEvent {
    /// Published on `events.analysis.report_generated`.
    fn subject(&self) -> NatsSubject {
        NatsSubject::AnalysisReportGenerated
    }
}
// Topic: events.analysis.report_failed
/// Event emitted when a single analysis module fails.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReportFailedEvent {
    pub request_id: Uuid,
    pub symbol: CanonicalSymbol,
    /// The analysis module that failed (mirrors `ReportGeneratedEvent::module_id`).
    pub module_id: String,
    /// Human-readable failure description.
    pub error: String,
}
impl SubjectMessage for ReportFailedEvent {
fn subject(&self) -> NatsSubject {
NatsSubject::AnalysisReportFailed
}
} }

View File

@ -18,11 +18,20 @@ pub struct HealthStatus {
pub details: HashMap<String, String>, pub details: HashMap<String, String>,
} }
/// Coarse lifecycle status of a background task; serialized in snake_case
/// (e.g. "in_progress") for API payloads.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, utoipa::ToSchema)]
#[serde(rename_all = "snake_case")]
pub enum TaskStatus {
    Queued,
    InProgress,
    Completed,
    Failed,
}
#[derive(Serialize, Deserialize, Debug, Clone)] #[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TaskProgress { pub struct TaskProgress {
pub request_id: Uuid, pub request_id: Uuid,
pub task_name: String, pub task_name: String,
pub status: String, pub status: TaskStatus,
pub progress_percent: u8, pub progress_percent: u8,
pub details: String, pub details: String,
pub started_at: DateTime<Utc>, pub started_at: DateTime<Utc>,

View File

@ -0,0 +1,156 @@
use crate::dtos::{
SessionDataDto, ProviderCacheDto, CompanyProfileDto,
TimeSeriesFinancialBatchDto, TimeSeriesFinancialDto, ProviderStatusDto
};
use crate::config_models::{
DataSourcesConfig, LlmProvidersConfig, AnalysisTemplateSets
};
use reqwest::{Client, StatusCode};
use uuid::Uuid;
use anyhow::Result;
/// Thin HTTP wrapper over the persistence service's REST API.
///
/// Cloning is cheap: `reqwest::Client` is reference-counted internally.
#[derive(Clone)]
pub struct PersistenceClient {
    client: Client,
    base_url: String,
}

impl PersistenceClient {
    /// Creates a client for the given base URL. A trailing slash is
    /// stripped so endpoint paths can always begin with '/'.
    pub fn new(base_url: String) -> Self {
        Self {
            client: Client::new(),
            base_url: base_url.trim_end_matches('/').to_string(),
        }
    }

    /// Joins a path onto the normalized base URL.
    fn endpoint(&self, path: &str) -> String {
        format!("{}{}", self.base_url, path)
    }

    // --- Session Data ---

    /// Stores one session-data record.
    pub async fn insert_session_data(&self, dto: &SessionDataDto) -> Result<()> {
        self.client
            .post(self.endpoint("/session-data"))
            .json(dto)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }

    /// Fetches all session-data records captured for a request.
    pub async fn get_session_data(&self, request_id: Uuid) -> Result<Vec<SessionDataDto>> {
        let response = self.client
            .get(self.endpoint(&format!("/session-data/{}", request_id)))
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    // --- Provider Cache ---

    /// Looks up a cache entry by key; `None` when the server has no entry.
    pub async fn get_cache(&self, key: &str) -> Result<Option<ProviderCacheDto>> {
        let response = self.client
            .get(self.endpoint("/provider-cache"))
            .query(&[("key", key)])
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Creates or replaces a cache entry.
    pub async fn set_cache(&self, dto: &ProviderCacheDto) -> Result<()> {
        self.client
            .post(self.endpoint("/provider-cache"))
            .json(dto)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }

    // --- Existing Methods (Ported for completeness) ---

    /// Fetches a company profile; a 404 maps to `Ok(None)` rather than an error.
    pub async fn get_company_profile(&self, symbol: &str) -> Result<Option<CompanyProfileDto>> {
        let response = self.client
            .get(self.endpoint(&format!("/companies/{}", symbol)))
            .send()
            .await?;
        if response.status() == StatusCode::NOT_FOUND {
            return Ok(None);
        }
        Ok(Some(response.error_for_status()?.json().await?))
    }

    /// Uploads a batch of financial records; an empty batch is a no-op.
    pub async fn batch_insert_financials(&self, dtos: Vec<TimeSeriesFinancialDto>) -> Result<()> {
        if dtos.is_empty() {
            return Ok(());
        }
        let batch = TimeSeriesFinancialBatchDto { records: dtos };
        self.client
            .post(self.endpoint("/market-data/financials/batch"))
            .json(&batch)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }

    // --- Configs ---

    /// Fetches the data-sources configuration document.
    pub async fn get_data_sources_config(&self) -> Result<DataSourcesConfig> {
        let response = self.client
            .get(self.endpoint("/configs/data_sources"))
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Replaces the data-sources configuration; returns the stored version.
    pub async fn update_data_sources_config(&self, config: &DataSourcesConfig) -> Result<DataSourcesConfig> {
        let response = self.client
            .put(self.endpoint("/configs/data_sources"))
            .json(config)
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Fetches the LLM-providers configuration document.
    pub async fn get_llm_providers_config(&self) -> Result<LlmProvidersConfig> {
        let response = self.client
            .get(self.endpoint("/configs/llm_providers"))
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Replaces the LLM-providers configuration; returns the stored version.
    pub async fn update_llm_providers_config(&self, config: &LlmProvidersConfig) -> Result<LlmProvidersConfig> {
        let response = self.client
            .put(self.endpoint("/configs/llm_providers"))
            .json(config)
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Fetches the analysis template sets configuration.
    pub async fn get_analysis_template_sets(&self) -> Result<AnalysisTemplateSets> {
        let response = self.client
            .get(self.endpoint("/configs/analysis_template_sets"))
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    /// Replaces the analysis template sets; returns the stored version.
    pub async fn update_analysis_template_sets(&self, config: &AnalysisTemplateSets) -> Result<AnalysisTemplateSets> {
        let response = self.client
            .put(self.endpoint("/configs/analysis_template_sets"))
            .json(config)
            .send()
            .await?
            .error_for_status()?;
        Ok(response.json().await?)
    }

    // --- Deprecated/Legacy Support ---

    /// Writes a provider status record for a symbol.
    pub async fn update_provider_status(&self, symbol: &str, provider_id: &str, status: ProviderStatusDto) -> Result<()> {
        self.client
            .put(self.endpoint(&format!("/companies/{}/providers/{}/status", symbol, provider_id)))
            .json(&status)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }

    /// Deprecated: kept for call-site compatibility; intentionally a no-op.
    pub async fn upsert_company_profile(&self, _profile: CompanyProfileDto) -> Result<()> {
        Ok(())
    }
}

View File

@ -0,0 +1,40 @@
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
/// Liveness/health state a service reports in its heartbeat.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, PartialEq)]
pub enum ServiceStatus {
    Active,
    Degraded,
    Maintenance,
}

/// Functional category of a registered service, serialized in snake_case.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum ServiceRole {
    DataProvider,
    ReportGenerator,
    Persistence,
    Gateway,
    Other,
}

/// Payload a service sends to the gateway to announce itself.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct ServiceRegistration {
    /// Unique ID for this service instance (e.g., "tushare-provider-uuid")
    pub service_id: String,
    /// Service type/name (e.g., "tushare")
    pub service_name: String,
    /// The role/category of the service
    pub role: ServiceRole,
    /// Base URL for the service (e.g., "http://10.0.1.5:8000")
    pub base_url: String,
    /// Health check endpoint
    pub health_check_url: String,
}

/// Periodic liveness signal sent by a registered service.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct Heartbeat {
    /// Must match the `service_id` supplied at registration time.
    pub service_id: String,
    /// Current self-reported status.
    pub status: ServiceStatus,
}

View File

@ -0,0 +1,142 @@
use std::fmt;
use std::str::FromStr;
use uuid::Uuid;
use serde::{Serialize, de::DeserializeOwned};
/// Trait for messages that know their own NATS subject.
///
/// Lets publishers route a message without hard-coding subject strings;
/// the full subject catalogue lives in [`NatsSubject`].
pub trait SubjectMessage: Serialize + DeserializeOwned + Send + Sync {
    /// The subject this message should be published on.
    fn subject(&self) -> NatsSubject;
}
/// Central catalogue of every NATS subject used by the platform.
///
/// Keeping subjects in one enum prevents publisher/consumer string drift;
/// the canonical string forms live in the `Display`/`FromStr` impls below.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum NatsSubject {
    // --- Commands ---
    /// "workflow.commands.start"
    WorkflowCommandStart,
    /// "workflow.commands.sync_state"
    WorkflowCommandSyncState,
    /// "data_fetch_commands"
    DataFetchCommands,
    /// "analysis.commands.generate_report"
    AnalysisCommandGenerateReport,

    // --- Events ---
    // Data Events
    /// "events.data.financials_persisted"
    DataFinancialsPersisted,
    /// "events.data.fetch_failed"
    DataFetchFailed,

    // Analysis Events
    /// "events.analysis.report_generated"
    AnalysisReportGenerated,
    /// "events.analysis.report_failed"
    AnalysisReportFailed,

    // Workflow Events (Dynamic)
    /// "events.workflow.{request_id}" — per-request progress stream.
    WorkflowProgress(Uuid),

    // --- Wildcards (For Subscription) ---
    /// "events.analysis.>"
    AnalysisEventsWildcard,
    /// "workflow.commands.>"
    WorkflowCommandsWildcard,
    /// "events.data.>"
    DataEventsWildcard,
}
impl fmt::Display for NatsSubject {
    /// Renders the canonical NATS subject string for this variant.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The only variant carrying dynamic content is handled up front;
        // everything else maps to a fixed string.
        if let Self::WorkflowProgress(id) = self {
            return write!(f, "events.workflow.{}", id);
        }
        let fixed = match self {
            Self::WorkflowCommandStart => "workflow.commands.start",
            Self::WorkflowCommandSyncState => "workflow.commands.sync_state",
            Self::DataFetchCommands => "data_fetch_commands",
            Self::AnalysisCommandGenerateReport => "analysis.commands.generate_report",
            Self::DataFinancialsPersisted => "events.data.financials_persisted",
            Self::DataFetchFailed => "events.data.fetch_failed",
            Self::AnalysisReportGenerated => "events.analysis.report_generated",
            Self::AnalysisReportFailed => "events.analysis.report_failed",
            Self::AnalysisEventsWildcard => "events.analysis.>",
            Self::WorkflowCommandsWildcard => "workflow.commands.>",
            Self::DataEventsWildcard => "events.data.>",
            Self::WorkflowProgress(_) => unreachable!("handled above"),
        };
        f.write_str(fixed)
    }
}
impl FromStr for NatsSubject {
    type Err = String;

    /// Parses a subject string back into a [`NatsSubject`].
    ///
    /// # Errors
    /// Returns a descriptive message for unknown subjects and for
    /// `events.workflow.*` subjects whose suffix is not a valid UUID.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "workflow.commands.start" => Ok(Self::WorkflowCommandStart),
            "workflow.commands.sync_state" => Ok(Self::WorkflowCommandSyncState),
            "data_fetch_commands" => Ok(Self::DataFetchCommands),
            "analysis.commands.generate_report" => Ok(Self::AnalysisCommandGenerateReport),
            "events.data.financials_persisted" => Ok(Self::DataFinancialsPersisted),
            "events.data.fetch_failed" => Ok(Self::DataFetchFailed),
            "events.analysis.report_generated" => Ok(Self::AnalysisReportGenerated),
            "events.analysis.report_failed" => Ok(Self::AnalysisReportFailed),
            "events.analysis.>" => Ok(Self::AnalysisEventsWildcard),
            "workflow.commands.>" => Ok(Self::WorkflowCommandsWildcard),
            "events.data.>" => Ok(Self::DataEventsWildcard),
            _ => {
                // BUGFIX: this previously used `trim_start_matches`, which
                // strips the pattern *repeatedly*, so a malformed subject
                // like "events.workflow.events.workflow.<uuid>" was
                // accepted. `strip_prefix` removes exactly one occurrence
                // and also folds the starts_with check into one call.
                if let Some(uuid_str) = s.strip_prefix("events.workflow.") {
                    if let Ok(uuid) = Uuid::parse_str(uuid_str) {
                        return Ok(Self::WorkflowProgress(uuid));
                    }
                }
                Err(format!("Unknown or invalid subject: {}", s))
            }
        }
    }
}
// Implement TryFrom for convenience
impl TryFrom<&str> for NatsSubject {
type Error = String;
fn try_from(value: &str) -> Result<Self, Self::Error> {
Self::from_str(value)
}
}
impl TryFrom<String> for NatsSubject {
type Error = String;
fn try_from(value: String) -> Result<Self, Self::Error> {
Self::from_str(&value)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Every static subject must survive a to_string -> from_str round trip.
    #[test]
    fn test_subject_round_trip() {
        let subjects = vec![
            (NatsSubject::WorkflowCommandStart, "workflow.commands.start"),
            (NatsSubject::WorkflowCommandSyncState, "workflow.commands.sync_state"),
            (NatsSubject::DataFetchCommands, "data_fetch_commands"),
            (NatsSubject::AnalysisCommandGenerateReport, "analysis.commands.generate_report"),
            (NatsSubject::DataFinancialsPersisted, "events.data.financials_persisted"),
            (NatsSubject::DataFetchFailed, "events.data.fetch_failed"),
            (NatsSubject::AnalysisReportGenerated, "events.analysis.report_generated"),
            (NatsSubject::AnalysisReportFailed, "events.analysis.report_failed"),
            (NatsSubject::AnalysisEventsWildcard, "events.analysis.>"),
            // COVERAGE FIX: these two wildcard variants were previously
            // absent from the round-trip table.
            (NatsSubject::WorkflowCommandsWildcard, "workflow.commands.>"),
            (NatsSubject::DataEventsWildcard, "events.data.>"),
        ];

        for (subject, string_val) in subjects {
            assert_eq!(subject.to_string(), string_val);
            assert_eq!(NatsSubject::from_str(string_val).unwrap(), subject);
        }
    }

    /// The per-request progress subject embeds a UUID and must round-trip.
    #[test]
    fn test_dynamic_subject() {
        let id = Uuid::new_v4();
        let subject = NatsSubject::WorkflowProgress(id);
        let expected = format!("events.workflow.{}", id);

        assert_eq!(subject.to_string(), expected);
        assert_eq!(NatsSubject::from_str(&expected).unwrap(), subject);
    }

    /// Garbage strings and non-UUID workflow suffixes must be rejected.
    #[test]
    fn test_invalid_subject() {
        assert!(NatsSubject::from_str("invalid.subject").is_err());
        assert!(NatsSubject::from_str("events.workflow.invalid-uuid").is_err());
    }
}

Some files were not shown because too many files have changed in this diff Show More