feat(realtime): wire up realtime quotes on the frontend and flesh out the backend cache

Frontend: add the RealTimeQuoteResponse type; add a useRealtimeQuote hook and show the price and timestamp next to the chart on the report page (strict TTL, no fallback)

FastAPI: add a read-only GET /financials/{market}/{symbol}/realtime?max_age_seconds=.. endpoint that reads the Rust cache via DataPersistenceClient

Rust: add the realtime_quotes hypertable migration; add POST /api/v1/market-data/quotes and GET /api/v1/market-data/quotes/{symbol}?market=..; add the DTOs/models/DB functions; fix the #[api] macro and path parameters; generate the SQLx offline cache (.sqlx) to support offline builds

Python: add realtime-quote upsert/get to DataPersistenceClient and adjust the GET paths and parameters

Note: the TradingView chart is a third-party websocket and not under our cache's control; the numeric display on the page goes through our own cache path, which is consistent and controllable.
This commit is contained in:
Lv, Qi 2025-11-09 05:12:14 +08:00
parent 230f180dea
commit 21155bc4f8
56 changed files with 7760 additions and 256 deletions

View File

@ -25,6 +25,7 @@ class Settings(BaseSettings):
# Microservices
CONFIG_SERVICE_BASE_URL: str = "http://config-service:7000/api/v1"
DATA_PERSISTENCE_BASE_URL: str = "http://data-persistence-service:3000/api/v1"
class Config:
env_file = ".env"

View File

@ -1,18 +1,7 @@
"""
Application dependencies and providers
"""
from typing import AsyncGenerator
from fastapi import Depends
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import AsyncSessionLocal
from app.services.config_manager import ConfigManager
async def get_db_session() -> AsyncGenerator[AsyncSession, None]:
"""Provides a database session to the application."""
async with AsyncSessionLocal() as session:
yield session
def get_config_manager(db_session: AsyncSession = Depends(get_db_session)) -> ConfigManager:
"""Dependency to get the configuration manager."""
return ConfigManager(db_session=db_session)
def get_config_manager() -> ConfigManager:
return ConfigManager()

View File

@ -21,9 +21,11 @@ from app.schemas.financial import (
AnalysisResponse,
AnalysisConfigResponse,
TodaySnapshotResponse,
RealTimeQuoteResponse,
)
from app.services.company_profile_client import CompanyProfileClient
from app.services.analysis_client import AnalysisClient, load_analysis_config, get_analysis_config
from app.services.data_persistence_client import DataPersistenceClient, NewAnalysisResult, DailyMarketData, DailyMarketDataBatch
# Lazy DataManager loader to avoid import-time failures when optional providers/config are missing
_dm = None
@ -248,6 +250,18 @@ async def generate_full_analysis(
if response.success:
completed_modules_content[module_type] = response.content
# Persist analysis result via Rust data-persistence-service
try:
dp = DataPersistenceClient()
await dp.create_analysis_result(NewAnalysisResult(
symbol=ts_code,
module_id=module_type,
model_name=response.model,
content=response.content,
meta_data={"tokens": response.tokens, "elapsed_ms": response.elapsed_ms},
))
except Exception as e:
logger.error(f"[Persistence] Failed to persist analysis result for {module_type}: {e}")
else:
# If a module fails, subsequent dependent modules will get an empty string for its context.
# This prevents total failure but may affect quality.
@ -734,7 +748,7 @@ async def generate_analysis(
logger.info(f"[API] Analysis generation completed, success={result.get('success')}")
return AnalysisResponse(
response = AnalysisResponse(
ts_code=ts_code,
company_name=company_name,
analysis_type=analysis_type,
@ -745,6 +759,22 @@ async def generate_analysis(
success=result.get("success", False),
error=result.get("error")
)
# Persist on success
if response.success:
try:
dp = DataPersistenceClient()
await dp.create_analysis_result(NewAnalysisResult(
symbol=ts_code,
module_id=analysis_type,
model_name=response.model,
content=response.content,
meta_data={"tokens": response.tokens, "elapsed_ms": response.elapsed_ms},
))
except Exception as e:
# Log and continue
import logging
logger = logging.getLogger(__name__)
logger.error(f"[Persistence] Failed to persist analysis result: {e}")
return response
@router.get("/china/{ts_code}/snapshot", response_model=TodaySnapshotResponse)
@ -767,47 +797,95 @@ async def get_today_snapshot(ts_code: str):
except Exception:
company_name = None
# Use "yesterday" as the query date; the provider resolves it to the latest trading day no later than that date
# Use "yesterday" as the query date
base_dt = (datetime.now() - timedelta(days=1)).date()
base_str = base_dt.strftime("%Y%m%d")
# Take the main fields from daily_basic, including close, pe, pb, dv_ratio, total_mv
rows = await get_dm().get_data(
'get_daily_basic_points',
stock_code=ts_code,
trade_dates=[base_str]
# Fetch the latest trading-day snapshot via the data-persistence service (looking back 10 days)
dp = DataPersistenceClient()
start_dt = base_dt - timedelta(days=10)
daily_list = await dp.get_daily_data_by_symbol(
symbol=ts_code,
start_date=start_dt,
end_date=base_dt + timedelta(days=1)
)
row = None
if isinstance(rows, list) and rows:
# get_daily_basic_points returns one record per trading day
row = rows[0]
trade_date = None
# Cache backfill: if no data, fetch from the data source and write it into the Rust persistence service
if not isinstance(daily_list, list) or len(daily_list) == 0:
try:
# 1) Prefer daily_basic (more complete valuation/market-cap fields)
rows = await get_dm().get_data(
'get_daily_basic_points',
stock_code=ts_code,
trade_dates=[base_str]
)
persisted = False
if isinstance(rows, list) and rows:
r = rows[0]
trade_date = str(r.get('trade_date') or r.get('trade_dt') or r.get('date') or base_str)
y, m, d = int(trade_date[:4]), int(trade_date[4:6]), int(trade_date[6:8])
record = DailyMarketData(
symbol=ts_code,
trade_date=datetime(y, m, d).date(),
open_price=None,
high_price=None,
low_price=None,
close_price=r.get('close'),
volume=r.get('vol') or r.get('volume'),
pe=r.get('pe'),
pb=r.get('pb'),
total_mv=r.get('total_mv'),
)
await dp.batch_insert_daily_data(DailyMarketDataBatch(records=[record]))
persisted = True
# 2) If close is still missing, fall back to daily (price/volume only)
if not persisted:
d_rows = await get_dm().get_data('get_daily_points', stock_code=ts_code, trade_dates=[base_str])
if isinstance(d_rows, list) and d_rows:
d0 = d_rows[0]
trade_date = str(d0.get('trade_date') or d0.get('trade_dt') or d0.get('date') or base_str)
y, m, d = int(trade_date[:4]), int(trade_date[4:6]), int(trade_date[6:8])
record = DailyMarketData(
symbol=ts_code,
trade_date=datetime(y, m, d).date(),
open_price=d0.get('open'),
high_price=d0.get('high'),
low_price=d0.get('low'),
close_price=d0.get('close'),
volume=d0.get('vol') or d0.get('volume'),
pe=None,
pb=None,
total_mv=None,
)
await dp.batch_insert_daily_data(DailyMarketDataBatch(records=[record]))
# 3) Read back to confirm
daily_list = await dp.get_daily_data_by_symbol(
symbol=ts_code, start_date=start_dt, end_date=base_dt + timedelta(days=1)
)
except Exception:
# A failed backfill must not block the flow
pass
# Pick the last record with trade_date <= base_str
trade_date = base_str
close = None
pe = None
pb = None
dv_ratio = None
total_mv = None
if isinstance(row, dict):
trade_date = str(row.get('trade_date') or row.get('trade_dt') or row.get('date') or base_str)
close = row.get('close')
pe = row.get('pe')
pb = row.get('pb')
dv_ratio = row.get('dv_ratio')
total_mv = row.get('total_mv')
# If close is missing, fall back to the closing price from daily
if close is None:
d_rows = await get_dm().get_data('get_daily_points', stock_code=ts_code, trade_dates=[base_str])
if isinstance(d_rows, list) and d_rows:
d = d_rows[0]
close = d.get('close')
if trade_date is None:
trade_date = str(d.get('trade_date') or d.get('trade_dt') or d.get('date') or base_str)
if trade_date is None:
trade_date = base_str
if isinstance(daily_list, list) and daily_list:
candidates = [d for d in daily_list if d.trade_date.strftime("%Y%m%d") <= base_str]
if candidates:
last = sorted(candidates, key=lambda r: r.trade_date.strftime("%Y%m%d"))[-1]
trade_date = last.trade_date.strftime("%Y%m%d")
close = last.close_price
pe = last.pe
pb = last.pb
total_mv = last.total_mv
# dv_ratio may be unavailable; keep it None
return TodaySnapshotResponse(
ts_code=ts_code,
@ -830,9 +908,6 @@ async def get_market_snapshot(market: MarketEnum, stock_code: str):
- CN: reuse the China-market snapshot logic (daily_basic/daily)
- Other markets: fall back to daily quotes for the latest trading day's close; the remaining fields return null for now
"""
if market == MarketEnum.cn:
return await get_today_snapshot(stock_code)
try:
# Company name (optional)
company_name = None
@ -846,43 +921,116 @@ async def get_market_snapshot(market: MarketEnum, stock_code: str):
base_dt = (datetime.now() - timedelta(days=1)).date()
base_str = base_dt.strftime("%Y%m%d")
# To reliably get the latest trading day, look back 10 days
# Fetch the latest trading-day snapshot via the data-persistence service (looking back 10 days)
dp = DataPersistenceClient()
start_dt = base_dt - timedelta(days=10)
start_str = start_dt.strftime("%Y%m%d")
end_dt = base_dt + timedelta(days=1)
end_str = end_dt.strftime("%Y%m%d")
daily_list = await dp.get_daily_data_by_symbol(
symbol=stock_code,
start_date=start_dt,
end_date=base_dt + timedelta(days=1)
)
rows = await get_dm().get_daily_price(stock_code=stock_code, start_date=start_str, end_date=end_str)
trade_date = None
close = None
if isinstance(rows, list) and rows:
# Pick the last record with trade_date <= base_str
# Cache backfill (common to all markets): if no data, fetch from the source, write it, then read back
if not isinstance(daily_list, list) or len(daily_list) == 0:
try:
candidates = [r for r in rows if str(r.get("trade_date") or r.get("date") or "") <= base_str]
if candidates:
last = sorted(candidates, key=lambda r: str(r.get("trade_date") or r.get("date") or ""))[-1]
trade_date = str(last.get("trade_date") or last.get("date") or base_str)
close = last.get("close")
start_str = start_dt.strftime("%Y%m%d")
end_str = (base_dt + timedelta(days=1)).strftime("%Y%m%d")
rows = await get_dm().get_daily_price(stock_code=stock_code, start_date=start_str, end_date=end_str)
last_rec = None
if isinstance(rows, list) and rows:
try:
candidates = [r for r in rows if str(r.get("trade_date") or r.get("date") or "") <= base_str]
if candidates:
last_rec = sorted(candidates, key=lambda r: str(r.get("trade_date") or r.get("date") or ""))[-1]
except Exception:
last_rec = None
if last_rec:
t = str(last_rec.get("trade_date") or last_rec.get("date") or base_str)
y, m, d = int(t[:4]), int(t[4:6]), int(t[6:8])
record = DailyMarketData(
symbol=stock_code,
trade_date=datetime(y, m, d).date(),
open_price=last_rec.get('open'),
high_price=last_rec.get('high'),
low_price=last_rec.get('low'),
close_price=last_rec.get('close'),
volume=last_rec.get('vol') or last_rec.get('volume'),
pe=None,
pb=None,
total_mv=None,
)
await dp.batch_insert_daily_data(DailyMarketDataBatch(records=[record]))
daily_list = await dp.get_daily_data_by_symbol(
symbol=stock_code, start_date=start_dt, end_date=base_dt + timedelta(days=1)
)
except Exception:
pass
if trade_date is None:
trade_date = base_str
trade_date = base_str
close = None
pe = None
pb = None
total_mv = None
if isinstance(daily_list, list) and daily_list:
candidates = [d for d in daily_list if d.trade_date.strftime("%Y%m%d") <= base_str]
if candidates:
last = sorted(candidates, key=lambda r: r.trade_date.strftime("%Y%m%d"))[-1]
trade_date = last.trade_date.strftime("%Y%m%d")
close = last.close_price
pe = last.pe
pb = last.pb
total_mv = last.total_mv
return TodaySnapshotResponse(
ts_code=stock_code,
trade_date=trade_date,
name=company_name,
close=close,
pe=None,
pb=None,
pe=pe if market == MarketEnum.cn else None,
pb=pb if market == MarketEnum.cn else None,
dv_ratio=None,
total_mv=None,
total_mv=total_mv if market == MarketEnum.cn else None,
)
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to fetch snapshot: {e}")
@router.get("/{market}/{stock_code}/realtime", response_model=RealTimeQuoteResponse)
async def get_realtime_quote(
market: MarketEnum,
stock_code: str,
max_age_seconds: int = Query(30, ge=1, le=3600),
):
"""
Realtime quote (strict TTL): missing or stale data returns 404; no fallback fetch from the data source.
The data must be pre-warmed into the Rust persistence service by an external task.
"""
try:
dp = DataPersistenceClient()
quote = await dp.get_latest_realtime_quote(market.value, stock_code, max_age_seconds=max_age_seconds)
if not quote:
raise HTTPException(status_code=404, detail="quote not found or stale")
return RealTimeQuoteResponse(
symbol=quote.symbol,
market=quote.market,
ts=quote.ts.isoformat(),
price=quote.price,
open_price=quote.open_price,
high_price=quote.high_price,
low_price=quote.low_price,
prev_close=quote.prev_close,
change=quote.change,
change_percent=quote.change_percent,
volume=quote.volume,
source=quote.source,
)
except HTTPException:
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to fetch realtime quote: {e}")
@router.get("/china/{ts_code}/analysis/{analysis_type}/stream")
async def stream_analysis(
ts_code: str,

View File

@ -82,3 +82,18 @@ class TodaySnapshotResponse(BaseModel):
pb: Optional[float] = None
dv_ratio: Optional[float] = None
total_mv: Optional[float] = None
class RealTimeQuoteResponse(BaseModel):
symbol: str
market: str
ts: str
price: float
open_price: Optional[float] = None
high_price: Optional[float] = None
low_price: Optional[float] = None
prev_close: Optional[float] = None
change: Optional[float] = None
change_percent: Optional[float] = None
volume: Optional[int] = None
source: Optional[str] = None
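With the schema in place, the read path is a plain HTTP GET against the endpoint added in the previous file. A minimal consumer sketch (not part of this commit); the host/port come from the docker-compose mapping of the backend to 18000, and the `/api/v1/financials` prefix is an assumption about how the router is mounted:

```python
import httpx

# Hypothetical polling helper. A 404 means "no quote, or the cached quote is
# older than max_age_seconds"; the endpoint never falls back to a data source.
async def fetch_quote(market: str, symbol: str, max_age_seconds: int = 30):
    url = f"http://localhost:18000/api/v1/financials/{market}/{symbol}/realtime"
    async with httpx.AsyncClient(timeout=10.0) as client:
        resp = await client.get(url, params={"max_age_seconds": max_age_seconds})
        if resp.status_code == 404:
            return None
        resp.raise_for_status()
        return resp.json()  # shape matches RealTimeQuoteResponse above
```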

View File

@ -1,71 +1,38 @@
"""
Configuration Management Service
Configuration Management Service (file + service based; no direct DB)
"""
import json
import os
import asyncio
from typing import Any, Dict
import asyncpg
import httpx
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.future import select
from app.models.system_config import SystemConfig
from app.schemas.config import ConfigResponse, ConfigUpdateRequest, DatabaseConfig, NewApiConfig, DataSourceConfig, ConfigTestResponse
from app.core.config import settings
class ConfigManager:
"""Manages system configuration by merging a static JSON file with dynamic settings from the database."""
"""Manages system configuration by fetching from config-service and updating local config."""
def __init__(self, db_session: AsyncSession, config_path: str = None):
self.db = db_session
def __init__(self, config_path: str = None):
if config_path is None:
# Default path: backend/app/services -> project_root/config/config.json
# __file__ = backend/app/services/config_manager.py
# go up three levels to project root
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
self.config_path = os.path.join(project_root, "config", "config.json")
else:
self.config_path = config_path
async def _fetch_base_config_from_service(self) -> Dict[str, Any]:
"""Fetch base configuration from config-service via HTTP."""
base_url = settings.CONFIG_SERVICE_BASE_URL.rstrip("/")
url = f"{base_url}/system"
try:
async with httpx.AsyncClient(timeout=10.0) as client:
resp = await client.get(url)
resp.raise_for_status()
data = resp.json()
return data if isinstance(data, dict) else {}
except Exception:
# For compatibility (staged migration), fall back to reading the local file on failure
if os.path.exists(self.config_path):
try:
with open(self.config_path, "r", encoding="utf-8") as f:
return json.load(f)
except Exception:
return {}
return {}
async def _load_dynamic_config_from_db(self) -> Dict[str, Any]:
"""Loads dynamic configuration overrides from the database.
Degrades gracefully to empty overrides when the database tables do not exist yet (e.g. migrations not run in dev), avoiding a 500 from the endpoint.
"""
try:
db_configs: Dict[str, Any] = {}
result = await self.db.execute(select(SystemConfig))
for record in result.scalars().all():
db_configs[record.config_key] = record.config_value
return db_configs
except Exception:
# On a missing table or any other DB error, ignore the dynamic config overrides
return {}
async with httpx.AsyncClient(timeout=10.0) as client:
resp = await client.get(url)
resp.raise_for_status()
data = resp.json()
if not isinstance(data, dict):
raise ValueError("Config service 返回的系统配置格式错误")
return data
def _merge_configs(self, base: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]:
"""Deeply merges the override config into the base config."""
for key, value in overrides.items():
if isinstance(value, dict) and isinstance(base.get(key), dict):
base[key] = self._merge_configs(base[key], value)
@ -74,60 +41,45 @@ class ConfigManager:
return base
async def get_config(self) -> ConfigResponse:
"""Gets the final, merged configuration."""
base_config = await self._fetch_base_config_from_service()
db_config = await self._load_dynamic_config_from_db()
merged_config = self._merge_configs(base_config, db_config)
# Support both locations: prefer new_api, then fall back to llm.new_api
new_api_src = merged_config.get("new_api") or merged_config.get("llm", {}).get("new_api", {})
new_api_src = base_config.get("new_api") or base_config.get("llm", {}).get("new_api", {})
return ConfigResponse(
database=DatabaseConfig(**merged_config.get("database", {})),
database=DatabaseConfig(**base_config.get("database", {})),
new_api=NewApiConfig(**(new_api_src or {})),
data_sources={
k: DataSourceConfig(**v)
for k, v in merged_config.get("data_sources", {}).items()
k: DataSourceConfig(**v)
for k, v in base_config.get("data_sources", {}).items()
}
)
async def update_config(self, config_update: ConfigUpdateRequest) -> ConfigResponse:
"""Updates configuration in the database and returns the new merged config."""
try:
update_dict = config_update.dict(exclude_unset=True)
# Validate the config payload
self._validate_config_data(update_dict)
for key, value in update_dict.items():
existing_config = await self.db.get(SystemConfig, key)
if existing_config:
# Merge with existing DB value before updating
if isinstance(existing_config.config_value, dict) and isinstance(value, dict):
merged_value = self._merge_configs(existing_config.config_value, value)
existing_config.config_value = merged_value
else:
existing_config.config_value = value
else:
new_config = SystemConfig(config_key=key, config_value=value)
self.db.add(new_config)
await self.db.commit()
return await self.get_config()
except Exception as e:
await self.db.rollback()
raise e
update_dict = config_update.dict(exclude_unset=True)
self._validate_config_data(update_dict)
# Write directly to config.json in the project root
current = {}
if os.path.exists(self.config_path):
with open(self.config_path, "r", encoding="utf-8") as f:
current = json.load(f) or {}
merged = self._merge_configs(current, update_dict)
with open(self.config_path, "w", encoding="utf-8") as f:
json.dump(merged, f, ensure_ascii=False, indent=2)
# Return the merged view (consistent with get_config: read once from the service to avoid multi-source drift)
return await self.get_config()
def _validate_config_data(self, config_data: Dict[str, Any]) -> None:
"""Validate configuration data before saving."""
if "database" in config_data:
db_config = config_data["database"]
if "url" in db_config:
url = db_config["url"]
if not url.startswith(("postgresql://", "postgresql+asyncpg://")):
raise ValueError("数据库URL必须以 postgresql:// 或 postgresql+asyncpg:// 开头")
if "new_api" in config_data:
new_api_config = config_data["new_api"]
if "api_key" in new_api_config and len(new_api_config["api_key"]) < 10:
@ -136,14 +88,13 @@ class ConfigManager:
base_url = new_api_config["base_url"]
if not base_url.startswith(("http://", "https://")):
raise ValueError("New API Base URL必须以 http:// 或 https:// 开头")
if "data_sources" in config_data:
for source_name, source_config in config_data["data_sources"].items():
if "api_key" in source_config and len(source_config["api_key"]) < 10:
raise ValueError(f"{source_name} API Key长度不能少于10个字符")
async def test_config(self, config_type: str, config_data: Dict[str, Any]) -> ConfigTestResponse:
"""Test a specific configuration."""
try:
if config_type == "database":
return await self._test_database(config_data)
@ -154,92 +105,47 @@ class ConfigManager:
elif config_type == "finnhub":
return await self._test_finnhub(config_data)
else:
return ConfigTestResponse(
success=False,
message=f"Unsupported config type: {config_type}"
)
return ConfigTestResponse(success=False, message=f"Unsupported config type: {config_type}")
except Exception as e:
return ConfigTestResponse(
success=False,
message=f"Test failed: {str(e)}"
)
return ConfigTestResponse(success=False, message=f"Test failed: {str(e)}")
async def _test_database(self, config_data: Dict[str, Any]) -> ConfigTestResponse:
"""Test database connection."""
db_url = config_data.get("url")
if not db_url:
return ConfigTestResponse(
success=False,
message="The database URL must not be empty"
)
return ConfigTestResponse(success=False, message="The database URL must not be empty")
try:
# Parse the database URL
if db_url.startswith("postgresql+asyncpg://"):
db_url = db_url.replace("postgresql+asyncpg://", "postgresql://")
# Test the connection
conn = await asyncpg.connect(db_url)
await conn.close()
return ConfigTestResponse(
success=True,
message="Database connection succeeded"
)
return ConfigTestResponse(success=True, message="Database connection succeeded")
except Exception as e:
return ConfigTestResponse(
success=False,
message=f"Database connection failed: {str(e)}"
)
return ConfigTestResponse(success=False, message=f"Database connection failed: {str(e)}")
async def _test_new_api(self, config_data: Dict[str, Any]) -> ConfigTestResponse:
"""Test New API (OpenAI-compatible) connection."""
api_key = config_data.get("api_key")
base_url = config_data.get("base_url")
if not api_key or not base_url:
return ConfigTestResponse(
success=False,
message="New API key and base URL must not be empty"
)
return ConfigTestResponse(success=False, message="New API key and base URL must not be empty")
try:
async with httpx.AsyncClient(timeout=10.0) as client:
# Test API availability by listing models
response = await client.get(
f"{base_url.rstrip('/')}/models",
headers={"Authorization": f"Bearer {api_key}"}
)
if response.status_code == 200:
return ConfigTestResponse(
success=True,
message="New API connection succeeded"
)
return ConfigTestResponse(success=True, message="New API connection succeeded")
else:
return ConfigTestResponse(
success=False,
message=f"New API test failed: HTTP {response.status_code} - {response.text}"
)
return ConfigTestResponse(success=False, message=f"New API test failed: HTTP {response.status_code} - {response.text}")
except Exception as e:
return ConfigTestResponse(
success=False,
message=f"New API connection failed: {str(e)}"
)
return ConfigTestResponse(success=False, message=f"New API connection failed: {str(e)}")
async def _test_tushare(self, config_data: Dict[str, Any]) -> ConfigTestResponse:
"""Test Tushare API connection."""
api_key = config_data.get("api_key")
if not api_key:
return ConfigTestResponse(
success=False,
message="Tushare API key must not be empty"
)
return ConfigTestResponse(success=False, message="Tushare API key must not be empty")
try:
async with httpx.AsyncClient(timeout=10.0) as client:
# Test API availability
response = await client.post(
"http://api.tushare.pro",
json={
@ -249,67 +155,34 @@ class ConfigManager:
"fields": "ts_code"
}
)
if response.status_code == 200:
data = response.json()
if data.get("code") == 0:
return ConfigTestResponse(
success=True,
message="Tushare API connection succeeded"
)
return ConfigTestResponse(success=True, message="Tushare API connection succeeded")
else:
return ConfigTestResponse(
success=False,
message=f"Tushare API error: {data.get('msg', 'unknown error')}"
)
return ConfigTestResponse(success=False, message=f"Tushare API error: {data.get('msg', 'unknown error')}")
else:
return ConfigTestResponse(
success=False,
message=f"Tushare API test failed: HTTP {response.status_code}"
)
return ConfigTestResponse(success=False, message=f"Tushare API test failed: HTTP {response.status_code}")
except Exception as e:
return ConfigTestResponse(
success=False,
message=f"Tushare API connection failed: {str(e)}"
)
return ConfigTestResponse(success=False, message=f"Tushare API connection failed: {str(e)}")
async def _test_finnhub(self, config_data: Dict[str, Any]) -> ConfigTestResponse:
"""Test Finnhub API connection."""
api_key = config_data.get("api_key")
if not api_key:
return ConfigTestResponse(
success=False,
message="Finnhub API key must not be empty"
)
return ConfigTestResponse(success=False, message="Finnhub API key must not be empty")
try:
async with httpx.AsyncClient(timeout=10.0) as client:
# Test API availability
response = await client.get(
f"https://finnhub.io/api/v1/quote",
"https://finnhub.io/api/v1/quote",
params={"symbol": "AAPL", "token": api_key}
)
if response.status_code == 200:
data = response.json()
if "c" in data: # 检查是否有价格数据
return ConfigTestResponse(
success=True,
message="Finnhub API连接成功"
)
if "c" in data:
return ConfigTestResponse(success=True, message="Finnhub API连接成功")
else:
return ConfigTestResponse(
success=False,
message="Finnhub API响应格式错误"
)
return ConfigTestResponse(success=False, message="Finnhub API响应格式错误")
else:
return ConfigTestResponse(
success=False,
message=f"Finnhub API测试失败: HTTP {response.status_code}"
)
return ConfigTestResponse(success=False, message=f"Finnhub API测试失败: HTTP {response.status_code}")
except Exception as e:
return ConfigTestResponse(
success=False,
message=f"Finnhub API连接失败: {str(e)}"
)
return ConfigTestResponse(success=False, message=f"Finnhub API连接失败: {str(e)}")

View File

@ -0,0 +1,182 @@
from __future__ import annotations
import datetime as dt
from typing import Any, Dict, List, Optional
import httpx
from pydantic import BaseModel
from app.core.config import settings
class CompanyProfile(BaseModel):
symbol: str
name: str
industry: Optional[str] = None
list_date: Optional[dt.date] = None
additional_info: Optional[Dict[str, Any]] = None
class TimeSeriesFinancial(BaseModel):
symbol: str
metric_name: str
period_date: dt.date
value: float
source: Optional[str] = None
class TimeSeriesFinancialBatch(BaseModel):
records: List[TimeSeriesFinancial]
class DailyMarketData(BaseModel):
symbol: str
trade_date: dt.date
open_price: Optional[float] = None
high_price: Optional[float] = None
low_price: Optional[float] = None
close_price: Optional[float] = None
volume: Optional[int] = None
pe: Optional[float] = None
pb: Optional[float] = None
total_mv: Optional[float] = None
class DailyMarketDataBatch(BaseModel):
records: List[DailyMarketData]
class RealtimeQuote(BaseModel):
symbol: str
market: str
ts: dt.datetime
price: float
open_price: Optional[float] = None
high_price: Optional[float] = None
low_price: Optional[float] = None
prev_close: Optional[float] = None
change: Optional[float] = None
change_percent: Optional[float] = None
volume: Optional[int] = None
source: Optional[str] = None
class NewAnalysisResult(BaseModel):
symbol: str
module_id: str
model_name: Optional[str] = None
content: str
meta_data: Optional[Dict[str, Any]] = None
class AnalysisResult(BaseModel):
id: str
symbol: str
module_id: str
generated_at: dt.datetime
model_name: Optional[str] = None
content: str
meta_data: Optional[Dict[str, Any]] = None
class DataPersistenceClient:
def __init__(self, base_url: Optional[str] = None, timeout: float = 20.0):
self.base_url = (base_url or settings.DATA_PERSISTENCE_BASE_URL).rstrip("/")
self.timeout = timeout
async def _client(self) -> httpx.AsyncClient:
return httpx.AsyncClient(base_url=self.base_url, timeout=self.timeout)
# Companies
async def upsert_company(self, profile: CompanyProfile) -> None:
async with await self._client() as client:
resp = await client.put("/companies", json=profile.model_dump(mode="json"))
resp.raise_for_status()
async def get_company(self, symbol: str) -> CompanyProfile:
async with await self._client() as client:
resp = await client.get(f"/companies/{symbol}")
resp.raise_for_status()
return CompanyProfile.model_validate(resp.json())
# Financials
async def batch_insert_financials(self, batch: TimeSeriesFinancialBatch) -> None:
async with await self._client() as client:
resp = await client.post("/market-data/financials/batch", json=batch.model_dump(mode="json"))
resp.raise_for_status()
async def get_financials_by_symbol(self, symbol: str, metrics: Optional[List[str]] = None) -> List[TimeSeriesFinancial]:
params = {}
if metrics:
params["metrics"] = ",".join(metrics)
async with await self._client() as client:
resp = await client.get(f"/market-data/financials/{symbol}", params=params)
resp.raise_for_status()
return [TimeSeriesFinancial.model_validate(item) for item in resp.json()]
# Daily data
async def batch_insert_daily_data(self, batch: DailyMarketDataBatch) -> None:
async with await self._client() as client:
resp = await client.post("/market-data/daily/batch", json=batch.model_dump(mode="json"))
resp.raise_for_status()
async def get_daily_data_by_symbol(
self,
symbol: str,
start_date: Optional[dt.date] = None,
end_date: Optional[dt.date] = None,
) -> List[DailyMarketData]:
params = {}
if start_date:
params["start_date"] = start_date.isoformat()
if end_date:
params["end_date"] = end_date.isoformat()
async with await self._client() as client:
resp = await client.get(f"/market-data/daily/{symbol}", params=params)
resp.raise_for_status()
return [DailyMarketData.model_validate(item) for item in resp.json()]
# Realtime quotes
async def upsert_realtime_quote(self, quote: RealtimeQuote) -> None:
async with await self._client() as client:
resp = await client.post("/market-data/quotes", json=quote.model_dump(mode="json"))
resp.raise_for_status()
async def get_latest_realtime_quote(
self,
market: str,
symbol: str,
max_age_seconds: Optional[int] = None,
) -> Optional[RealtimeQuote]:
params = {"market": market}
if max_age_seconds is not None:
params["max_age_seconds"] = int(max_age_seconds)
async with await self._client() as client:
resp = await client.get(f"/market-data/quotes/{symbol}", params=params)
if resp.status_code == 404:
return None
resp.raise_for_status()
return RealtimeQuote.model_validate(resp.json())
# Analysis results
async def create_analysis_result(self, new_result: NewAnalysisResult) -> AnalysisResult:
async with await self._client() as client:
resp = await client.post("/analysis-results", json=new_result.model_dump(mode="json"))
resp.raise_for_status()
return AnalysisResult.model_validate(resp.json())
async def get_analysis_results(self, symbol: str, module_id: Optional[str] = None) -> List[AnalysisResult]:
params = {"symbol": symbol}
if module_id:
params["module_id"] = module_id
async with await self._client() as client:
resp = await client.get("/analysis-results", params=params)
resp.raise_for_status()
return [AnalysisResult.model_validate(item) for item in resp.json()]
async def get_analysis_result_by_id(self, result_id: str) -> AnalysisResult:
async with await self._client() as client:
resp = await client.get(f"/analysis-results/{result_id}")
resp.raise_for_status()
return AnalysisResult.model_validate(resp.json())
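Because the realtime endpoint never fetches from a data source itself, an external task has to keep the cache warm. A minimal warm-up sketch (not part of this commit) using the client above; the symbol, market, and price are illustrative values:

```python
import asyncio
import datetime as dt

async def warm_up_once() -> None:
    dp = DataPersistenceClient()
    # Push one fresh quote into the Rust cache (illustrative values).
    await dp.upsert_realtime_quote(RealtimeQuote(
        symbol="AAPL",
        market="us",
        ts=dt.datetime.now(dt.timezone.utc),
        price=189.5,
        source="demo",
    ))
    # Read it back within the TTL window; None means missing or stale.
    quote = await dp.get_latest_realtime_quote("us", "AAPL", max_age_seconds=30)
    print(quote)

if __name__ == "__main__":
    asyncio.run(warm_up_once())
```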

View File

@ -2,8 +2,9 @@ version: "3.9"
services:
postgres-db:
image: postgres:16-alpine
image: timescale/timescaledb:2.15.2-pg16
container_name: fundamental-postgres
command: -c shared_preload_libraries=timescaledb
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
@ -18,6 +19,25 @@ services:
ports:
- "15432:5432"
data-persistence-service:
build:
context: ./services/data-persistence-service
dockerfile: Dockerfile
container_name: data-persistence-service
environment:
HOST: 0.0.0.0
PORT: 3000
# Rust service connects to the internal DB service name
DATABASE_URL: postgresql://postgres:postgres@postgres-db:5432/fundamental
ports:
- "13000:3000"
depends_on:
postgres-db:
condition: service_healthy
# If you prefer live-reload or local code mount, consider switching to a dev Dockerfile.
# volumes:
# - ./:/workspace
backend:
build:
context: .
@ -28,20 +48,20 @@ services:
environment:
PYTHONDONTWRITEBYTECODE: "1"
PYTHONUNBUFFERED: "1"
# SQLAlchemy async driver
DATABASE_URL: postgresql+asyncpg://postgres:postgres@postgres-db:5432/fundamental
# Config service base URL
CONFIG_SERVICE_BASE_URL: http://config-service:7000/api/v1
# Data persistence service base URL
DATA_PERSISTENCE_BASE_URL: http://data-persistence-service:3000/api/v1
volumes:
# Mount the whole project so relative paths to the project root (e.g. config/) in backend code keep working
- ./:/workspace
ports:
- "18000:8000"
depends_on:
postgres-db:
condition: service_healthy
config-service:
condition: service_started
data-persistence-service:
condition: service_started
frontend:
build:

View File

@ -0,0 +1,144 @@
# Database Schema Design (`database_schema_design.md`)
## 1. Core Design Philosophy and Technology Choices
After thorough discussion we settled on one core design principle: **build a dedicated, highly optimized persistence scheme for each shape of data**. This matches the project's pursuit of a stable, robust "Rustic" style.
Our database stack is unified on **PostgreSQL**, relying on its rich extension ecosystem to cover specific storage needs.
### 1.1. Time-Series Data: PostgreSQL + TimescaleDB
For the system's most central and highest-volume **time-series data** (financial metrics, market quotes), we explicitly adopt the **TimescaleDB** extension.
- **Why TimescaleDB?**
- **It removes the performance bottleneck**: its **hypertables** physically split one huge time-series table into small chunks managed by time range, so writes and time-based queries stay fast regardless of data volume.
- **It handles sparse and out-of-order data**: the architecture naturally accepts sparse, out-of-order writes, matching our "store what we have, backfill anytime" collection model.
- **It ships advanced features**: **continuous aggregates** efficiently and automatically downsample high-frequency data (e.g. ticks) into minute/hour/day aggregates (K-lines) with very fast queries; see the sketch after this list.
- **It adds nothing to the stack**: as a PostgreSQL extension it keeps everything in standard SQL, with no new database system to run.
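As a concrete illustration of continuous aggregates (a sketch only; the `ticks` table and its columns are hypothetical and not part of this schema):
```sql
-- Downsample a hypothetical `ticks` hypertable into 1-minute OHLCV bars.
CREATE MATERIALIZED VIEW ticks_1min
WITH (timescaledb.continuous) AS
SELECT
symbol,
time_bucket('1 minute', ts) AS bucket,
first(price, ts) AS open,
max(price) AS high,
min(price) AS low,
last(price, ts) AS close,
sum(volume) AS volume
FROM ticks
GROUP BY symbol, bucket;
```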
### 1.2. Other Data Types
- **Generated analysis content**: standard relational tables; structured metadata goes into indexed columns, unstructured text into a `TEXT` field.
- **Static and semi-static data**: standard relational tables.
- **Workflow and application config**: **prefer YAML config files** (`config/analysis-config.yaml`, etc.) for static workflows and analysis modules; the database only holds system-level settings that must be editable through the admin UI.
- **Execution metadata**: standard relational tables recording structured task-execution logs.
## 2. Detailed Schema Design
### 2.1. Time-Series Tables
#### 2.1.1. `time_series_financials` (financial metrics)
```sql
-- 1. Create a standard relational table
CREATE TABLE time_series_financials (
symbol VARCHAR(32) NOT NULL,
metric_name VARCHAR(64) NOT NULL, -- normalized metric name (e.g., 'roe', 'revenue')
period_date DATE NOT NULL, -- reporting period (e.g., '2023-12-31')
value NUMERIC NOT NULL, -- metric value
source VARCHAR(64), -- data source (e.g., 'tushare')
PRIMARY KEY (symbol, metric_name, period_date)
);
-- 2. Convert it into a TimescaleDB hypertable
SELECT create_hypertable('time_series_financials', 'period_date');
COMMENT ON TABLE time_series_financials IS 'Normalized financial metrics stored as time series, managed by TimescaleDB';
```
#### 2.1.2. `daily_market_data` (daily market data)
```sql
-- 1. Create a standard relational table
CREATE TABLE daily_market_data (
symbol VARCHAR(32) NOT NULL,
trade_date DATE NOT NULL,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
close_price NUMERIC,
volume BIGINT,
pe NUMERIC,
pb NUMERIC,
total_mv NUMERIC, -- total market cap
PRIMARY KEY (symbol, trade_date)
);
-- 2. Convert it into a TimescaleDB hypertable
SELECT create_hypertable('daily_market_data', 'trade_date');
COMMENT ON TABLE daily_market_data IS 'Daily prices, volumes, and key valuation metrics, managed by TimescaleDB';
```
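#### 2.1.3. `realtime_quotes` (realtime quotes)
This hypertable is added by the realtime-quote migration in this commit. The DDL below is a sketch reconstructed from the upsert/select queries in the commit's `.sqlx` cache; the actual migration may differ in types and constraints.
```sql
-- Sketch reconstructed from this commit's .sqlx query cache
CREATE TABLE realtime_quotes (
symbol VARCHAR(32) NOT NULL,
market VARCHAR(16) NOT NULL,
ts TIMESTAMPTZ NOT NULL,
price NUMERIC NOT NULL,
open_price NUMERIC,
high_price NUMERIC,
low_price NUMERIC,
prev_close NUMERIC,
change NUMERIC,
change_percent NUMERIC,
volume BIGINT,
source VARCHAR(64),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
PRIMARY KEY (symbol, market, ts) -- implied by ON CONFLICT (symbol, market, ts)
);
-- Convert it into a TimescaleDB hypertable
SELECT create_hypertable('realtime_quotes', 'ts');
```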
---
### 2.2. `analysis_results` (AI analysis results)
```sql
CREATE TABLE analysis_results (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
symbol VARCHAR(32) NOT NULL,
module_id VARCHAR(64) NOT NULL, -- analysis module ID (e.g., 'bull_case')
generated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
model_name VARCHAR(64), -- AI model used
content TEXT NOT NULL, -- full AI-generated text
meta_data JSONB -- token usage, latency, and other metadata
);
COMMENT ON TABLE analysis_results IS 'Analysis report text generated by AI models';
CREATE INDEX idx_analysis_results_symbol_module ON analysis_results (symbol, module_id, generated_at DESC);
```
---
### 2.3. `company_profiles` (company profiles)
```sql
CREATE TABLE company_profiles (
symbol VARCHAR(32) PRIMARY KEY, -- normalized ticker symbol
name VARCHAR(255) NOT NULL, -- company name
industry VARCHAR(255), -- industry
list_date DATE, -- listing date
additional_info JSONB, -- additional info
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE company_profiles IS 'Basic, relatively static company information';
```
---
### 2.4. `system_config` (system config)
```sql
CREATE TABLE system_config (
config_key VARCHAR(255) PRIMARY KEY,
config_value JSONB NOT NULL,
description TEXT,
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
COMMENT ON TABLE system_config IS 'System-level settings editable through the UI; secrets (API keys) must not be stored here';
```
---
### 2.5. `execution_logs` (execution logs)
```sql
CREATE TABLE execution_logs (
id BIGSERIAL PRIMARY KEY,
report_id UUID NOT NULL, -- associated report ID
step_name VARCHAR(255) NOT NULL, -- step name
status VARCHAR(32) NOT NULL, -- 'running', 'completed', 'failed'
start_time TIMESTAMPTZ NOT NULL,
end_time TIMESTAMPTZ,
duration_ms INTEGER,
token_usage JSONB, -- { "prompt": 100, "completion": 200 }
error_message TEXT,
log_details JSONB
);
COMMENT ON TABLE execution_logs IS 'Structured logs for each step of report generation';
CREATE INDEX idx_execution_logs_report_id ON execution_logs (report_id);
```

View File

@ -0,0 +1,198 @@
# Rust Data Persistence Service Design (`rust_data_service_design.md`)
## 1. Positioning and Core Responsibilities
- **Service name**: `data-persistence-service`
- **Positioning**: this service is the **single data persistence layer** of the microservice architecture and the **sole owner** of the database, responsible for all database interaction.
### 1.1. Responsibility Boundary: a Core-Entity Service
The service is designed as a **core-entity data service**, not an all-encompassing "god service" that manages every kind of data. Its responsibility is strictly limited to **core data entities shared across business domains**.
This follows a pragmatic **hybrid microservice data pattern**:
- **Core data is managed centrally**, guaranteeing uniqueness and consistency of shared data. The core entities are:
- Company profiles (`company_profiles`)
- Normalized financial data (`time_series_financials`)
- Normalized market data (`daily_market_data`)
- AI analysis results (`analysis_results`) - a core artifact consumed by multiple parties.
- **Business data is persisted independently**: future services with their own business domains (e.g. a "quant backtesting service") are **allowed and encouraged to own their own database schemas or tables**. When they need core entities they should call this service's API rather than connect to the database directly.
This strategy keeps core data consistent while giving new services maximum flexibility for independent development and fast iteration.
## 2. Technology Choices and Development Paradigm
### 2.1. Core Stack
- **Language**: **Rust**
- **Dev kit**: **`service_kit`** (the project's in-house, one-stop microservice development kit)
- **Web framework**: **`axum`**
- **Database access**: **`sqlx`**
- **(De)serialization**: **`serde`** (integrated automatically by `service_kit`)
### 2.2. Paradigm: Driven by the API Spec
We adopt the **OpenAPI**-centric development paradigm provided by `service_kit`.
- **Data contracts**: every data transfer object (DTO) is annotated with `service_kit`'s `#[api_dto]` macro, which derives `serde` and `utoipa::ToSchema`, making the Rust code the single source of truth for the API spec.
- **Frontend alignment**: `cargo forge generate-types` generates TypeScript type definitions for the frontend from the service's auto-generated OpenAPI spec, giving end-to-end type safety.
- **Wire format**: data exchanged between services remains **JSON**.
## 3. API Endpoint Design
The API is designed strictly around generic reads and writes of the core entities.
---
### 3.1. Companies (`/companies`)
- **Table**: `company_profiles`
| Method | Endpoint | Description |
| :--- | :--- | :--- |
| `PUT` | `/api/v1/companies` | Create or update (upsert) a company profile |
| `GET` | `/api/v1/companies/{symbol}` | Get a company's profile |
---
### 3.2. Market & Financial Data (`/market-data`)
- **Tables**: `time_series_financials`, `daily_market_data`
| Method | Endpoint | Description |
| :--- | :--- | :--- |
| `POST` | `/api/v1/market-data/financials/batch` | Batch-write time-series financial metrics |
| `GET` | `/api/v1/market-data/financials/{symbol}` | Query a company's financial metrics (filter by `metrics`, `start_date`, `end_date`) |
| `POST` | `/api/v1/market-data/daily/batch` | Batch-write daily market data |
| `GET` | `/api/v1/market-data/daily/{symbol}` | Query a company's daily data (filter by `start_date`, `end_date`) |
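| `POST` | `/api/v1/market-data/quotes` | Upsert a realtime quote (added by this commit) |
| `GET` | `/api/v1/market-data/quotes/{symbol}?market=..` | Get the latest realtime quote, optionally bounded by `max_age_seconds` (added by this commit) |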
---
### 3.3. AI Analysis Results (`/analysis-results`)
- **Table**: `analysis_results`
| Method | Endpoint | Description |
| :--- | :--- | :--- |
| `POST` | `/api/v1/analysis-results` | Save a new AI analysis result |
| `GET` | `/api/v1/analysis-results` | List analysis results (filter by `symbol`, `module_id`) |
| `GET` | `/api/v1/analysis-results/{id}` | Get a single analysis result |
---
### 3.4. System Config (`/system-config`)
- **Table**: `system_config`
| Method | Endpoint | Description |
| :--- | :--- | :--- |
| `PUT` | `/api/v1/system-config/{key}` | Create or update one key-value config entry |
| `GET` | `/api/v1/system-config/{key}` | Get one key-value config entry |
## 4. Data Transfer Objects (DTOs)
All API request and response bodies are defined with `service_kit`'s `#[api_dto]` macro, which provides serialization, API schema generation, and debug support automatically.
```rust
use service_kit::macros::api_dto;
// Example: DTO for batch-writing financial data
#[api_dto]
pub struct TimeSeriesFinancialDto {
pub symbol: String,
pub metric_name: String,
pub period_date: chrono::NaiveDate,
pub value: f64,
pub source: Option<String>,
}
// Example: DTO for creating an AI analysis result
#[api_dto]
pub struct NewAnalysisResultDto {
pub symbol: String,
pub module_id: String,
pub model_name: Option<String>,
pub content: String,
pub meta_data: Option<serde_json::Value>,
}
```
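This commit also adds a realtime-quote DTO under the same convention. A sketch; the field names follow the `realtime_quotes` queries in the `.sqlx` cache, while the concrete numeric types (`rust_decimal::Decimal` rather than `f64`) are assumptions:
```rust
// Sketch: realtime-quote DTO; numeric types are assumed, not confirmed here.
#[api_dto]
pub struct RealtimeQuoteDto {
    pub symbol: String,
    pub market: String,
    pub ts: chrono::DateTime<chrono::Utc>,
    pub price: rust_decimal::Decimal,
    pub open_price: Option<rust_decimal::Decimal>,
    pub high_price: Option<rust_decimal::Decimal>,
    pub low_price: Option<rust_decimal::Decimal>,
    pub prev_close: Option<rust_decimal::Decimal>,
    pub change: Option<rust_decimal::Decimal>,
    pub change_percent: Option<rust_decimal::Decimal>,
    pub volume: Option<i64>,
    pub source: Option<String>,
}
```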
## 5. Development Workflow and Toolchain
The service fully follows the standardized workflow provided by `service_kit`.
- **Project init**: create the service skeleton with `cargo generate --git <repo_url> service-template`.
- **Quality gates**:
- Lint: `cargo forge lint`
- Unit and integration tests: `cargo forge test`
- **API debugging**: use the `forge-cli` tool via `cargo forge <command>` for interactive API calls against a running service.
- **Frontend alignment**: in CI/CD or local development, `cargo forge generate-types` syncs this service's API types to the frontend project.
## 6. Project Layout (suggested)
```
/data-persistence-service
├── Cargo.toml
└── src/
├── main.rs # entry point; initializes the DB pool and defines routes
├── error.rs # unified error-handling types
├── db.rs # database access logic (sqlx)
├── models.rs # structs mapping the database tables
├── dtos.rs # structs for API requests/responses
└── api/
├── mod.rs
├── companies.rs
├── market_data.rs
└── analysis.rs
```
## 7. Implementation Plan & To-Do List
This section breaks development of `data-persistence-service` into executable, trackable tasks.
### Phase 1: Project Init and Basic Setup
- [x] **T1.1**: initialize the project under `services/data-persistence-service` from `service-template` via `cargo generate`.
- [x] **T1.2**: remove the template's sample code (e.g. the `hello` module).
- [x] **T1.3**: configure `Cargo.toml`; add the core dependencies `sqlx` (with `postgres`, `runtime-tokio-rustls`, `chrono`, `uuid`, `json`), `axum`, `tokio`, `serde`.
- [x] **T1.4**: set up a `.env` file for `DATABASE_URL` and other environment variables.
- [x] **T1.5**: create the PostgreSQL connection pool (`sqlx::PgPool`) in `main.rs`.
### Phase 2: Database Integration and Migrations
- [x] **T2.1**: install `sqlx-cli` (`cargo install sqlx-cli`).
- [x] **T2.2**: initialize the migrations directory with `sqlx-cli` (`sqlx migrate add create_initial_tables`).
- [x] **T2.3**: in the generated migration SQL, write the `CREATE TABLE` statements for all tables defined in `docs/database_schema_design.md` (`company_profiles`, `time_series_financials`, etc.).
- [x] **T2.4**: in the migration SQL, add `create_hypertable` calls for the time-series tables (`time_series_financials`, `daily_market_data`).
- [x] **T2.5**: run `sqlx migrate run` and verify in the database that the tables were created correctly.
- [x] **T2.6**: in `src/models.rs`, write the Rust structs corresponding to the table schemas.
### Phase 3: Core API Implementation
- [x] **T3.1**: **Companies API**:
- [x] create `CompanyProfileDto` in `src/dtos.rs`.
- [x] implement the `upsert_company` and `get_company_by_symbol` database functions in `src/db.rs`.
- [x] create the `axum` handlers for `PUT /api/v1/companies` and `GET /api/v1/companies/{symbol}` in `src/api/companies.rs` and wire them to the `db` functions.
- [x] **T3.2**: **Market Data API**:
- [x] create `TimeSeriesFinancialDto` and `DailyMarketDataDto` in `src/dtos.rs`.
- [x] implement `batch_insert_financials` and `get_financials_by_symbol` in `src/db.rs`.
- [x] implement `batch_insert_daily_data` and `get_daily_data_by_symbol` in `src/db.rs`.
- [x] create the corresponding `axum` handlers and routes in `src/api/market_data.rs`.
- [x] **T3.3**: **Analysis Results API**:
- [x] create `NewAnalysisResultDto` and `AnalysisResultDto` in `src/dtos.rs`.
- [x] implement `create_analysis_result` and `get_analysis_results` in `src/db.rs`.
- [x] create the corresponding `axum` handlers and routes in `src/api/analysis.rs`.
- [x] **T3.4**: assemble all API routes in `main.rs`.
### Phase 4: Containerization and Integration
- [x] **T4.1**: write a multi-stage `Dockerfile` optimized for image size and build speed.
- [x] **T4.2**: add a `data-persistence-service` definition to the root `docker-compose.yml`, depending on `postgres-db`.
- [x] **T4.3**: update the `Tiltfile` to include the new Rust service so `tilt up` builds and runs it.
- [x] **T4.4**: **(integration point)** change the existing Python `backend` service to stop connecting to the database directly and instead read/write data through the `data-persistence-service` API.
### Phase 5: Testing and Documentation
- [x] **T5.1**: write unit tests for every database function in `db.rs` (requires `sqlx`'s test-macros feature).
- [x] **T5.2**: write integration tests for every API endpoint.
- [ ] **T5.3**: ensure all DTOs are integrated into the OpenAPI spec via the `#[api_dto]` macro.
- [ ] **T5.4**: run `cargo forge generate-types` and verify the TypeScript types are generated successfully.
- [ ] **T5.5**: write a `README.md` covering local startup, configuration, and testing.

View File

@ -1,7 +1,7 @@
'use client';
import { useParams, useSearchParams } from 'next/navigation';
import { useChinaFinancials, useFinancials, useFinancialConfig, useAnalysisConfig, generateFullAnalysis, useSnapshot } from '@/hooks/useApi';
import { useChinaFinancials, useFinancials, useFinancialConfig, useAnalysisConfig, generateFullAnalysis, useSnapshot, useRealtimeQuote } from '@/hooks/useApi';
import { Spinner } from '@/components/ui/spinner';
import { Button } from '@/components/ui/button';
import { CheckCircle, XCircle, RotateCw } from 'lucide-react';
@ -54,6 +54,7 @@ export default function ReportPage() {
const isLoading = chinaFin.isLoading || otherFin.isLoading;
const unifiedSymbol = isChina ? normalizedTsCode : symbol;
const { data: snapshot, error: snapshotError, isLoading: snapshotLoading } = useSnapshot(normalizedMarket, unifiedSymbol);
const { data: realtime, error: realtimeError, isLoading: realtimeLoading } = useRealtimeQuote(normalizedMarket, unifiedSymbol, { maxAgeSeconds: 30, refreshIntervalMs: 5000 });
const { data: financialConfig } = useFinancialConfig();
const { data: analysisConfig } = useAnalysisConfig();
@ -797,10 +798,25 @@ export default function ReportPage() {
<TabsContent value="chart" className="space-y-4">
<h2 className="text-lg font-medium"> TradingView</h2>
<div className="flex items-center gap-3 text-sm mb-4">
<CheckCircle className="size-4 text-green-600" />
<div className="text-muted-foreground">
- {unifiedSymbol}
<div className="flex items-center justify-between text-sm mb-4">
<div className="flex items-center gap-3">
<CheckCircle className="size-4 text-green-600" />
<div className="text-muted-foreground">
- {unifiedSymbol}
</div>
</div>
<div className="text-xs text-muted-foreground">
{realtimeLoading ? (
<span className="inline-flex items-center gap-2"><Spinner className="size-3" /> Loading realtime quote…</span>
) : realtimeError ? (
<span className="text-red-500">Realtime quote unavailable</span>
) : realtime ? (
<span>
{realtime.price.toLocaleString()} · {new Date(realtime.ts).toLocaleString()}
</span>
) : (
<span>No realtime data</span>
)}
</div>
</div>

View File

@ -1,6 +1,6 @@
import useSWR from 'swr';
import { useConfigStore } from '@/stores/useConfigStore';
import { BatchFinancialDataResponse, FinancialConfigResponse, AnalysisConfigResponse, TodaySnapshotResponse } from '@/types';
import { BatchFinancialDataResponse, FinancialConfigResponse, AnalysisConfigResponse, TodaySnapshotResponse, RealTimeQuoteResponse } from '@/types';
const fetcher = async (url: string) => {
const res = await fetch(url);
@ -168,3 +168,36 @@ export function useSnapshot(market?: string, stockCode?: string) {
}
);
}
export function useRealtimeQuote(
market?: string,
stockCode?: string,
options?: {
maxAgeSeconds?: number;
refreshIntervalMs?: number;
}
) {
const normalizeMarket = (m?: string) => {
const t = (m || '').toLowerCase();
if (t === 'usa') return 'us';
if (t === 'china') return 'cn';
if (t === 'hkex') return 'hk';
if (t === 'jpn') return 'jp';
return t;
};
const mkt = normalizeMarket(market);
const maxAge = options?.maxAgeSeconds ?? 30;
const refreshMs = options?.refreshIntervalMs ?? 5000;
return useSWR<RealTimeQuoteResponse>(
mkt && stockCode ? `/api/financials/${encodeURIComponent(mkt)}/${encodeURIComponent(stockCode)}/realtime?max_age_seconds=${encodeURIComponent(String(maxAge))}` : null,
fetcher,
{
revalidateOnFocus: false,
revalidateOnReconnect: false,
refreshInterval: refreshMs,
dedupingInterval: Math.min(1000, refreshMs),
shouldRetryOnError: false,
errorRetryCount: 0,
}
);
}

View File

@ -209,6 +209,24 @@ export interface TodaySnapshotResponse {
total_mv?: number | null; // unit: 10,000 CNY
}
/**
* Realtime quote (strict TTL)
*/
export interface RealTimeQuoteResponse {
symbol: string;
market: string;
ts: string; // ISO8601
price: number;
open_price?: number | null;
high_price?: number | null;
low_price?: number | null;
prev_close?: number | null;
change?: number | null;
change_percent?: number | null;
volume?: number | null;
source?: string | null;
}
// ============================================================================
// 表格相关类型
// ============================================================================

Binary file not shown.

View File

@ -0,0 +1,6 @@
use data_persistence_service::{
db,
dtos::{CompanyProfileDto, DailyMarketDataDto, NewAnalysisResultDto, TimeSeriesFinancialDto},
models,
};
use sqlx::{postgres::PgPoolOptions, PgPool};

View File

@ -0,0 +1,8 @@
[alias]
# Require `forge-cli` to be installed once: `cargo install service_kit --features api-cli`
# Then `cargo forge ...` will forward args to the installed `forge-cli` binary.
forge = "forge-cli --"
## Note:
## We intentionally avoid local path patches in container builds to ensure reproducibility.
## Use crates.io or git dependencies via Cargo.toml instead.

View File

@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO company_profiles (symbol, name, industry, list_date, additional_info, updated_at)\n VALUES ($1, $2, $3, $4, $5, NOW())\n ON CONFLICT (symbol) DO UPDATE SET\n name = EXCLUDED.name,\n industry = EXCLUDED.industry,\n list_date = EXCLUDED.list_date,\n additional_info = EXCLUDED.additional_info,\n updated_at = NOW()\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Varchar",
"Date",
"Jsonb"
]
},
"nullable": []
},
"hash": "21a6b3602a199978f87186634866e7bd72a083ebd55985acae1d712434e2ebb6"
}

View File

@ -0,0 +1,95 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT symbol, market, ts, price, open_price, high_price, low_price, prev_close, change, change_percent, volume, source, updated_at\n FROM realtime_quotes\n WHERE symbol = $1 AND market = $2\n ORDER BY ts DESC\n LIMIT 1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "market",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "ts",
"type_info": "Timestamptz"
},
{
"ordinal": 3,
"name": "price",
"type_info": "Numeric"
},
{
"ordinal": 4,
"name": "open_price",
"type_info": "Numeric"
},
{
"ordinal": 5,
"name": "high_price",
"type_info": "Numeric"
},
{
"ordinal": 6,
"name": "low_price",
"type_info": "Numeric"
},
{
"ordinal": 7,
"name": "prev_close",
"type_info": "Numeric"
},
{
"ordinal": 8,
"name": "change",
"type_info": "Numeric"
},
{
"ordinal": 9,
"name": "change_percent",
"type_info": "Numeric"
},
{
"ordinal": 10,
"name": "volume",
"type_info": "Int8"
},
{
"ordinal": 11,
"name": "source",
"type_info": "Varchar"
},
{
"ordinal": 12,
"name": "updated_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text",
"Text"
]
},
"nullable": [
false,
false,
false,
false,
true,
true,
true,
true,
true,
true,
true,
true,
false
]
},
"hash": "242e6f3319cfa0c19b53c4da80993a1da3cb77f58a3c0dac0260bf3adb4e501f"
}

View File

@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT symbol, metric_name, period_date, value, source\n FROM time_series_financials\n WHERE symbol = $1\n ORDER BY period_date DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "metric_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "period_date",
"type_info": "Date"
},
{
"ordinal": 3,
"name": "value",
"type_info": "Numeric"
},
{
"ordinal": 4,
"name": "source",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false,
false,
true
]
},
"hash": "4536af5904df2b38a10e801f488cf2bd4176dccf06b0b791284d729f53ab262d"
}

View File

@ -0,0 +1,62 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO analysis_results (symbol, module_id, model_name, content, meta_data)\n VALUES ($1, $2, $3, $4, $5)\n RETURNING id, symbol, module_id, generated_at, model_name, content, meta_data\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "module_id",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "generated_at",
"type_info": "Timestamptz"
},
{
"ordinal": 4,
"name": "model_name",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "meta_data",
"type_info": "Jsonb"
}
],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Varchar",
"Text",
"Jsonb"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
true
]
},
"hash": "47dd5646e6a94d84da1db7e7aa5961ce012cf8467e5b98fc88f073f84ddd7b87"
}

View File

@ -0,0 +1,58 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, symbol, module_id, generated_at, model_name, content, meta_data\n FROM analysis_results\n WHERE symbol = $1\n ORDER BY generated_at DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "module_id",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "generated_at",
"type_info": "Timestamptz"
},
{
"ordinal": 4,
"name": "model_name",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "meta_data",
"type_info": "Jsonb"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
true
]
},
"hash": "5ddfe5e70c62b906ca23de28cd0056fa116a90f932567cefff259e110b6e9b1b"
}

View File

@ -0,0 +1,25 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO realtime_quotes (\n symbol, market, ts, price, open_price, high_price, low_price, prev_close, change, change_percent, volume, source, updated_at\n ) VALUES (\n $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, NOW()\n )\n ON CONFLICT (symbol, market, ts) DO UPDATE SET\n price = EXCLUDED.price,\n open_price = EXCLUDED.open_price,\n high_price = EXCLUDED.high_price,\n low_price = EXCLUDED.low_price,\n prev_close = EXCLUDED.prev_close,\n change = EXCLUDED.change,\n change_percent = EXCLUDED.change_percent,\n volume = EXCLUDED.volume,\n source = EXCLUDED.source,\n updated_at = NOW()\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Timestamptz",
"Numeric",
"Numeric",
"Numeric",
"Numeric",
"Numeric",
"Numeric",
"Numeric",
"Int8",
"Varchar"
]
},
"nullable": []
},
"hash": "79ac63ac22399f0ba64783b87fbca6f7637c0f331c1346211ac5275e51221654"
}

View File

@ -0,0 +1,23 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO daily_market_data (symbol, trade_date, open_price, high_price, low_price, close_price, volume, pe, pb, total_mv)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)\n ON CONFLICT (symbol, trade_date) DO UPDATE SET\n open_price = EXCLUDED.open_price,\n high_price = EXCLUDED.high_price,\n low_price = EXCLUDED.low_price,\n close_price = EXCLUDED.close_price,\n volume = EXCLUDED.volume,\n pe = EXCLUDED.pe,\n pb = EXCLUDED.pb,\n total_mv = EXCLUDED.total_mv\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Varchar",
"Date",
"Numeric",
"Numeric",
"Numeric",
"Numeric",
"Int8",
"Numeric",
"Numeric",
"Numeric"
]
},
"nullable": []
},
"hash": "7bc18e5f68bfc1455b7e6e74feacabb79121b6a8008c999852a9fae3a8396789"
}

View File

@ -0,0 +1,47 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT symbol, metric_name, period_date, value, source\n FROM time_series_financials\n WHERE symbol = $1 AND metric_name = ANY($2)\n ORDER BY period_date DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "metric_name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "period_date",
"type_info": "Date"
},
{
"ordinal": 3,
"name": "value",
"type_info": "Numeric"
},
{
"ordinal": 4,
"name": "source",
"type_info": "Varchar"
}
],
"parameters": {
"Left": [
"Text",
"TextArray"
]
},
"nullable": [
false,
false,
false,
false,
true
]
},
"hash": "8868e58490b2f11be13c74ae3b1ce71a3f589b61d046815b6e9a7fe67ce94886"
}

View File

@ -0,0 +1,59 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, symbol, module_id, generated_at, model_name, content, meta_data\n FROM analysis_results\n WHERE symbol = $1 AND module_id = $2\n ORDER BY generated_at DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "module_id",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "generated_at",
"type_info": "Timestamptz"
},
{
"ordinal": 4,
"name": "model_name",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "meta_data",
"type_info": "Jsonb"
}
],
"parameters": {
"Left": [
"Text",
"Text"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
true
]
},
"hash": "926e80040622e569d7698396e0126fecc648346e67ecae96cb191077737f5ab5"
}

View File

@ -0,0 +1,78 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT symbol, trade_date, open_price, high_price, low_price, close_price, volume, pe, pb, total_mv\n FROM daily_market_data\n WHERE symbol = $1\n AND ($2::DATE IS NULL OR trade_date >= $2)\n AND ($3::DATE IS NULL OR trade_date <= $3)\n ORDER BY trade_date DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "trade_date",
"type_info": "Date"
},
{
"ordinal": 2,
"name": "open_price",
"type_info": "Numeric"
},
{
"ordinal": 3,
"name": "high_price",
"type_info": "Numeric"
},
{
"ordinal": 4,
"name": "low_price",
"type_info": "Numeric"
},
{
"ordinal": 5,
"name": "close_price",
"type_info": "Numeric"
},
{
"ordinal": 6,
"name": "volume",
"type_info": "Int8"
},
{
"ordinal": 7,
"name": "pe",
"type_info": "Numeric"
},
{
"ordinal": 8,
"name": "pb",
"type_info": "Numeric"
},
{
"ordinal": 9,
"name": "total_mv",
"type_info": "Numeric"
}
],
"parameters": {
"Left": [
"Text",
"Date",
"Date"
]
},
"nullable": [
false,
false,
true,
true,
true,
true,
true,
true,
true,
true
]
},
"hash": "a487a815febf42b5c58fce44382f2d849f81b5831e733fc1d8faa62196f67dc9"
}

View File

@ -0,0 +1,52 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT symbol, name, industry, list_date, additional_info, updated_at\n FROM company_profiles\n WHERE symbol = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 1,
"name": "name",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "industry",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "list_date",
"type_info": "Date"
},
{
"ordinal": 4,
"name": "additional_info",
"type_info": "Jsonb"
},
{
"ordinal": 5,
"name": "updated_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
true,
true,
true,
false
]
},
"hash": "a857a2bbeb2b7defebc976b472df1fd3b88ab154afe1d0d6ca044e616a75e60f"
}

View File

@ -0,0 +1,18 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO time_series_financials (symbol, metric_name, period_date, value, source)\n VALUES ($1, $2, $3, $4, $5)\n ON CONFLICT (symbol, metric_name, period_date) DO UPDATE SET\n value = EXCLUDED.value,\n source = EXCLUDED.source\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Varchar",
"Varchar",
"Date",
"Numeric",
"Varchar"
]
},
"nullable": []
},
"hash": "c08e82dfa0c325fe81baef633be7369ff6e4eb4534d00a41da94adfebbd44cc2"
}

View File

@ -0,0 +1,58 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT id, symbol, module_id, generated_at, model_name, content, meta_data\n FROM analysis_results\n WHERE id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "symbol",
"type_info": "Varchar"
},
{
"ordinal": 2,
"name": "module_id",
"type_info": "Varchar"
},
{
"ordinal": 3,
"name": "generated_at",
"type_info": "Timestamptz"
},
{
"ordinal": 4,
"name": "model_name",
"type_info": "Varchar"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "meta_data",
"type_info": "Jsonb"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false,
true,
false,
true
]
},
"hash": "c3d06b1b669d66f82fd532a7bc782621101780f7f549852fc3b4405b477870af"
}

File diff suppressed because it is too large

View File

@ -0,0 +1,83 @@
[package]
name = "data-persistence-service"
version = "0.1.2"
edition = "2021"
authors = ["Lv, Qi <lvsoft@gmail.com>"]
default-run = "data-persistence-service-server"
[lib]
name = "data_persistence_service"
path = "src/lib.rs"
[[bin]]
name = "data-persistence-service-server"
path = "src/main.rs"
[[bin]]
name = "api-cli"
path = "src/bin/api-cli.rs"
# The cli feature is not yet compatible with the new architecture.
# required-features = ["service_kit/api-cli"]
[dependencies]
service_kit = { version = "0.1.2", default-features = true }
anyhow = "1.0"
rmcp = { version = "0.8.5", features = [
"transport-streamable-http-server",
"transport-worker"
] }
# Web framework
axum = "0.8"
tokio = { version = "1.0", features = ["full"] }
tower-http = { version = "0.6.6", features = ["cors", "trace"] }
tower = { version = "0.5", features = ["util"] }
# Observability
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
# Serialization
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# OpenAPI & Schema
utoipa = { version = "5.4", features = ["axum_extras", "chrono", "uuid"] }
utoipa-swagger-ui = { version = "9.0", features = ["axum"] }
# Environment variables
dotenvy = "0.15"
# Error Handling
thiserror = "2.0.17"
# Database
sqlx = { version = "0.8.6", features = [ "runtime-tokio-rustls", "postgres", "chrono", "uuid", "json", "rust_decimal" ] }
rust_decimal = { version = "1.36", features = ["serde"] }
chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1", features = ["serde", "v4"] }
# WASM CLI UI
rust-embed = "8.7"
axum-embed = "0.1.0"
[dev-dependencies]
http-body-util = "0.1"
tower = { version = "0.5", features = ["util"] }
# Feature management: everything enabled by default; can be disabled selectively
[features]
default = ["swagger-ui"]
swagger-ui = []
wasm-cli = []
# Make the template's `mcp` feature toggle service_kit's mcp functionality
mcp = ["service_kit/mcp"]
# Optional: pass api-cli through to service_kit
# api-cli = ["service_kit/api-cli"]
# --- For Local Development ---
# If you are developing `service_kit` locally, uncomment the following lines
# in your project's `.cargo/config.toml` file (create it if it doesn't exist)
# to make Cargo use your local version instead of the one from git.
#
# [patch.'https://github.com/lvsoft/service_kit']
# service_kit = { path = "../service_kit" } # Note: Adjust the path if your directory structure is different.

View File

@ -0,0 +1,26 @@
FROM rust:1.90-bookworm AS chef
WORKDIR /app
RUN cargo install cargo-chef
FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
ENV SQLX_OFFLINE=true
COPY --from=planner /app/recipe.json /app/recipe.json
RUN cargo chef cook --release --recipe-path /app/recipe.json
COPY . .
RUN cargo build --release --bin data-persistence-service-server
FROM debian:bookworm-slim AS runtime
WORKDIR /app
RUN groupadd --system --gid 1001 appuser && \
useradd --system --uid 1001 --gid 1001 appuser
USER appuser
COPY --from=builder /app/target/release/data-persistence-service-server /usr/local/bin/data-persistence-service-server
COPY ./migrations ./migrations
ENV HOST=0.0.0.0
ENV PORT=3000
EXPOSE 3000
ENTRYPOINT ["/usr/local/bin/data-persistence-service-server"]

View File

@ -0,0 +1,67 @@
# Data Persistence Service
This service is the sole owner of the database in the "fundamental analysis" microservice architecture, providing a RESTful API for all data-persistence needs.
## Overview
- **Language**: Rust
- **Framework**: Axum
- **Database**: PostgreSQL (with the TimescaleDB extension)
- **Core responsibility**: provide a stable, high-performance, type-safe API layer over the database.
## Local Development Guide
### 1. Prerequisites
- Rust toolchain (`rustup`)
- `sqlx-cli` (`cargo install sqlx-cli`)
- A running PostgreSQL instance with the TimescaleDB extension enabled.
### 2. Configuration
Copy `env.sample` to `.env` and set `DATABASE_URL` for your local environment:
```bash
cp env.sample .env
```
Your `.env` file should look like this:
```ini
# Port the service listens on
PORT=3000
# URL sqlx uses to connect to the database
# Make sure the user, password, host, port, and database name are all correct
DATABASE_URL=postgres://user:password@localhost:5432/fundamental_analysis
```
### 3. Database Migrations
Before running the service for the first time, and after any schema change, run the migrations to update the database:
```bash
sqlx migrate run
```
### 4. Running the Service
Compile and run the service:
```bash
cargo run
```
The service starts listening on the port configured in your `.env` file (3000 by default). Its OpenAPI specification (Swagger JSON) is served at `/api-docs/openapi.json`.
## Testing
To run all tests (including database and API integration tests), use the command below. Make sure `DATABASE_URL` in your `.env` file points to a valid test database with migrations applied.
```bash
cargo test
```
For verbose test output, use:
```bash
cargo test -- --nocapture
```

View File

@ -0,0 +1,86 @@
# WASM CLI - API invocation is now implemented
## 🎉 Problem solved
The issue where the WASM CLI only printed "Successfully matched command" without executing the actual API call has been fixed!
## 🔧 What was fixed
1. **Real HTTP API calls**: the original command-matching stub was replaced with actual requests via JavaScript's fetch API
2. **WASM bindings added**: asynchronous HTTP requests are implemented through web-sys and wasm-bindgen-futures
3. **Dependency conflict fixed**: reqwest's incompatibility with the WASM target is resolved via feature gating
4. **New async API**: the `run_command_async()` function now actually executes the API request and returns the result
## 📋 Main changes
### 1. New initialization function
```javascript
// Old version
init_cli(spec_json)
// New version - takes both the OpenAPI spec and the base URL
init_cli(spec_json, base_url)
```
### 2. New asynchronous command-execution function
```javascript
// New - actually performs the API call
const result = await run_command_async("v1.hello.get");
// Old version - deprecated; only returns an error message
const result = run_command("v1.hello.get");
```
## 🚀 Usage
### 1. Initialize the CLI
```javascript
import init, { init_cli, run_command_async } from './pkg/forge_cli_wasm.js';
// Initialize the WASM module
await init();
// Fetch the OpenAPI spec
const response = await fetch('http://localhost:3000/api-docs/openapi.json');
const spec = await response.text();
// Initialize the CLI
init_cli(spec, 'http://localhost:3000');
```
### 2. Execute API commands
```javascript
// Issue a GET request
const result1 = await run_command_async("v1.hello.get");
// Request with parameters
const result2 = await run_command_async("v1.add.get --a 1 --b 2");
// POST request (if the API supports it)
const result3 = await run_command_async('v1.create.post --body \'{"name": "test"}\'');
```
## 🧪 Testing
Open the `test.html` file in a browser to test:
1. Make sure your service is running at http://localhost:3000
2. Click the "Initialize CLI" button
3. Enter a command such as "v1.hello.get" or "v1.add.get --a 1 --b 2"
4. Click the "Run Command" button
5. Inspect the actual API response
## ⚠️ Important notes
1. **The old `run_command` function is deprecated**: use the new `run_command_async` function instead
2. **CORS support is required**: make sure your API server allows cross-origin requests
3. **Asynchronous operations**: all API calls are now async and require `await`
4. **Error handling**: a failed API request returns an error message instead of throwing an exception
## 🔍 Debugging
- Open the browser developer tools to inspect console logs
- Network requests appear in the Network tab
- Any errors are shown in the output area
Your WASM CLI now genuinely talks to the API instead of just "matching commands"! 🎉

View File

@ -0,0 +1,659 @@
let wasm;
function addToExternrefTable0(obj) {
const idx = wasm.__externref_table_alloc();
wasm.__wbindgen_export_2.set(idx, obj);
return idx;
}
function handleError(f, args) {
try {
return f.apply(this, args);
} catch (e) {
const idx = addToExternrefTable0(e);
wasm.__wbindgen_exn_store(idx);
}
}
const cachedTextDecoder = (typeof TextDecoder !== 'undefined' ? new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }) : { decode: () => { throw Error('TextDecoder not available') } } );
if (typeof TextDecoder !== 'undefined') { cachedTextDecoder.decode(); };
let cachedUint8ArrayMemory0 = null;
function getUint8ArrayMemory0() {
if (cachedUint8ArrayMemory0 === null || cachedUint8ArrayMemory0.byteLength === 0) {
cachedUint8ArrayMemory0 = new Uint8Array(wasm.memory.buffer);
}
return cachedUint8ArrayMemory0;
}
function getStringFromWasm0(ptr, len) {
ptr = ptr >>> 0;
return cachedTextDecoder.decode(getUint8ArrayMemory0().subarray(ptr, ptr + len));
}
function isLikeNone(x) {
return x === undefined || x === null;
}
const CLOSURE_DTORS = (typeof FinalizationRegistry === 'undefined')
? { register: () => {}, unregister: () => {} }
: new FinalizationRegistry(state => {
wasm.__wbindgen_export_3.get(state.dtor)(state.a, state.b)
});
function makeMutClosure(arg0, arg1, dtor, f) {
const state = { a: arg0, b: arg1, cnt: 1, dtor };
const real = (...args) => {
// First up with a closure we increment the internal reference
// count. This ensures that the Rust closure environment won't
// be deallocated while we're invoking it.
state.cnt++;
const a = state.a;
state.a = 0;
try {
return f(a, state.b, ...args);
} finally {
if (--state.cnt === 0) {
wasm.__wbindgen_export_3.get(state.dtor)(a, state.b);
CLOSURE_DTORS.unregister(state);
} else {
state.a = a;
}
}
};
real.original = state;
CLOSURE_DTORS.register(real, state, state);
return real;
}
function debugString(val) {
// primitive types
const type = typeof val;
if (type == 'number' || type == 'boolean' || val == null) {
return `${val}`;
}
if (type == 'string') {
return `"${val}"`;
}
if (type == 'symbol') {
const description = val.description;
if (description == null) {
return 'Symbol';
} else {
return `Symbol(${description})`;
}
}
if (type == 'function') {
const name = val.name;
if (typeof name == 'string' && name.length > 0) {
return `Function(${name})`;
} else {
return 'Function';
}
}
// objects
if (Array.isArray(val)) {
const length = val.length;
let debug = '[';
if (length > 0) {
debug += debugString(val[0]);
}
for(let i = 1; i < length; i++) {
debug += ', ' + debugString(val[i]);
}
debug += ']';
return debug;
}
// Test for built-in
const builtInMatches = /\[object ([^\]]+)\]/.exec(toString.call(val));
let className;
if (builtInMatches && builtInMatches.length > 1) {
className = builtInMatches[1];
} else {
// Failed to match the standard '[object ClassName]'
return toString.call(val);
}
if (className == 'Object') {
// we're a user defined class or Object
// JSON.stringify avoids problems with cycles, and is generally much
// easier than looping through ownProperties of `val`.
try {
return 'Object(' + JSON.stringify(val) + ')';
} catch (_) {
return 'Object';
}
}
// errors
if (val instanceof Error) {
return `${val.name}: ${val.message}\n${val.stack}`;
}
// TODO we could test for more things here, like `Set`s and `Map`s.
return className;
}
let WASM_VECTOR_LEN = 0;
const cachedTextEncoder = (typeof TextEncoder !== 'undefined' ? new TextEncoder('utf-8') : { encode: () => { throw Error('TextEncoder not available') } } );
const encodeString = (typeof cachedTextEncoder.encodeInto === 'function'
? function (arg, view) {
return cachedTextEncoder.encodeInto(arg, view);
}
: function (arg, view) {
const buf = cachedTextEncoder.encode(arg);
view.set(buf);
return {
read: arg.length,
written: buf.length
};
});
function passStringToWasm0(arg, malloc, realloc) {
if (realloc === undefined) {
const buf = cachedTextEncoder.encode(arg);
const ptr = malloc(buf.length, 1) >>> 0;
getUint8ArrayMemory0().subarray(ptr, ptr + buf.length).set(buf);
WASM_VECTOR_LEN = buf.length;
return ptr;
}
let len = arg.length;
let ptr = malloc(len, 1) >>> 0;
const mem = getUint8ArrayMemory0();
let offset = 0;
for (; offset < len; offset++) {
const code = arg.charCodeAt(offset);
if (code > 0x7F) break;
mem[ptr + offset] = code;
}
if (offset !== len) {
if (offset !== 0) {
arg = arg.slice(offset);
}
ptr = realloc(ptr, len, len = offset + arg.length * 3, 1) >>> 0;
const view = getUint8ArrayMemory0().subarray(ptr + offset, ptr + len);
const ret = encodeString(arg, view);
offset += ret.written;
ptr = realloc(ptr, len, offset, 1) >>> 0;
}
WASM_VECTOR_LEN = offset;
return ptr;
}
let cachedDataViewMemory0 = null;
function getDataViewMemory0() {
if (cachedDataViewMemory0 === null || cachedDataViewMemory0.buffer.detached === true || (cachedDataViewMemory0.buffer.detached === undefined && cachedDataViewMemory0.buffer !== wasm.memory.buffer)) {
cachedDataViewMemory0 = new DataView(wasm.memory.buffer);
}
return cachedDataViewMemory0;
}
function takeFromExternrefTable0(idx) {
const value = wasm.__wbindgen_export_2.get(idx);
wasm.__externref_table_dealloc(idx);
return value;
}
/**
* @param {string} spec_json
* @param {string} base_url
*/
export function init_cli(spec_json, base_url) {
const ptr0 = passStringToWasm0(spec_json, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ptr1 = passStringToWasm0(base_url, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len1 = WASM_VECTOR_LEN;
const ret = wasm.init_cli(ptr0, len0, ptr1, len1);
if (ret[1]) {
throw takeFromExternrefTable0(ret[0]);
}
}
/**
* @param {string} command_line
* @returns {Promise<any>}
*/
export function run_command_async(command_line) {
const ptr0 = passStringToWasm0(command_line, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.run_command_async(ptr0, len0);
return ret;
}
/**
* @param {string} _command_line
* @returns {string}
*/
export function run_command(_command_line) {
let deferred2_0;
let deferred2_1;
try {
const ptr0 = passStringToWasm0(_command_line, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.run_command(ptr0, len0);
deferred2_0 = ret[0];
deferred2_1 = ret[1];
return getStringFromWasm0(ret[0], ret[1]);
} finally {
wasm.__wbindgen_free(deferred2_0, deferred2_1, 1);
}
}
/**
 * Get tab-completion suggestions
* @param {string} line
* @param {number} cursor_pos
* @returns {CompletionResult}
*/
export function get_completions(line, cursor_pos) {
const ptr0 = passStringToWasm0(line, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.get_completions(ptr0, len0, cursor_pos);
return CompletionResult.__wrap(ret);
}
/**
* 获取历史记录
* @returns {string}
*/
export function get_history() {
let deferred1_0;
let deferred1_1;
try {
const ret = wasm.get_history();
deferred1_0 = ret[0];
deferred1_1 = ret[1];
return getStringFromWasm0(ret[0], ret[1]);
} finally {
wasm.__wbindgen_free(deferred1_0, deferred1_1, 1);
}
}
/**
 * Get a history item by index (0 is the newest; negative values count from the end)
* @param {number} index
* @returns {string | undefined}
*/
export function get_history_item(index) {
const ret = wasm.get_history_item(index);
let v1;
if (ret[0] !== 0) {
v1 = getStringFromWasm0(ret[0], ret[1]).slice();
wasm.__wbindgen_free(ret[0], ret[1] * 1, 1);
}
return v1;
}
/**
 * Search the command history (similar to Ctrl+R)
* @param {string} query
* @returns {string}
*/
export function search_history(query) {
let deferred2_0;
let deferred2_1;
try {
const ptr0 = passStringToWasm0(query, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len0 = WASM_VECTOR_LEN;
const ret = wasm.search_history(ptr0, len0);
deferred2_0 = ret[0];
deferred2_1 = ret[1];
return getStringFromWasm0(ret[0], ret[1]);
} finally {
wasm.__wbindgen_free(deferred2_0, deferred2_1, 1);
}
}
/**
 * Clear the command history
*/
export function clear_history() {
wasm.clear_history();
}
function __wbg_adapter_22(arg0, arg1, arg2) {
wasm.closure108_externref_shim(arg0, arg1, arg2);
}
function __wbg_adapter_68(arg0, arg1, arg2, arg3) {
wasm.closure130_externref_shim(arg0, arg1, arg2, arg3);
}
const CompletionResultFinalization = (typeof FinalizationRegistry === 'undefined')
? { register: () => {}, unregister: () => {} }
: new FinalizationRegistry(ptr => wasm.__wbg_completionresult_free(ptr >>> 0, 1));
/**
 * JSON representation of completion suggestions, used for interop with JavaScript
*/
export class CompletionResult {
static __wrap(ptr) {
ptr = ptr >>> 0;
const obj = Object.create(CompletionResult.prototype);
obj.__wbg_ptr = ptr;
CompletionResultFinalization.register(obj, obj.__wbg_ptr, obj);
return obj;
}
__destroy_into_raw() {
const ptr = this.__wbg_ptr;
this.__wbg_ptr = 0;
CompletionResultFinalization.unregister(this);
return ptr;
}
free() {
const ptr = this.__destroy_into_raw();
wasm.__wbg_completionresult_free(ptr, 0);
}
/**
* @returns {string}
*/
get suggestions() {
let deferred1_0;
let deferred1_1;
try {
const ret = wasm.completionresult_suggestions(this.__wbg_ptr);
deferred1_0 = ret[0];
deferred1_1 = ret[1];
return getStringFromWasm0(ret[0], ret[1]);
} finally {
wasm.__wbindgen_free(deferred1_0, deferred1_1, 1);
}
}
}
async function __wbg_load(module, imports) {
if (typeof Response === 'function' && module instanceof Response) {
if (typeof WebAssembly.instantiateStreaming === 'function') {
try {
return await WebAssembly.instantiateStreaming(module, imports);
} catch (e) {
if (module.headers.get('Content-Type') != 'application/wasm') {
console.warn("`WebAssembly.instantiateStreaming` failed because your server does not serve Wasm with `application/wasm` MIME type. Falling back to `WebAssembly.instantiate` which is slower. Original error:\n", e);
} else {
throw e;
}
}
}
const bytes = await module.arrayBuffer();
return await WebAssembly.instantiate(bytes, imports);
} else {
const instance = await WebAssembly.instantiate(module, imports);
if (instance instanceof WebAssembly.Instance) {
return { instance, module };
} else {
return instance;
}
}
}
function __wbg_get_imports() {
const imports = {};
imports.wbg = {};
imports.wbg.__wbg_call_672a4d21634d4a24 = function() { return handleError(function (arg0, arg1) {
const ret = arg0.call(arg1);
return ret;
}, arguments) };
imports.wbg.__wbg_call_7cccdd69e0791ae2 = function() { return handleError(function (arg0, arg1, arg2) {
const ret = arg0.call(arg1, arg2);
return ret;
}, arguments) };
imports.wbg.__wbg_fetch_b7bf320f681242d2 = function(arg0, arg1) {
const ret = arg0.fetch(arg1);
return ret;
};
imports.wbg.__wbg_instanceof_Response_f2cc20d9f7dfd644 = function(arg0) {
let result;
try {
result = arg0 instanceof Response;
} catch (_) {
result = false;
}
const ret = result;
return ret;
};
imports.wbg.__wbg_instanceof_Window_def73ea0955fc569 = function(arg0) {
let result;
try {
result = arg0 instanceof Window;
} catch (_) {
result = false;
}
const ret = result;
return ret;
};
imports.wbg.__wbg_log_a793dbed77c682d9 = function(arg0, arg1) {
console.log(getStringFromWasm0(arg0, arg1));
};
imports.wbg.__wbg_new_018dcc2d6c8c2f6a = function() { return handleError(function () {
const ret = new Headers();
return ret;
}, arguments) };
imports.wbg.__wbg_new_23a2665fac83c611 = function(arg0, arg1) {
try {
var state0 = {a: arg0, b: arg1};
var cb0 = (arg0, arg1) => {
const a = state0.a;
state0.a = 0;
try {
return __wbg_adapter_68(a, state0.b, arg0, arg1);
} finally {
state0.a = a;
}
};
const ret = new Promise(cb0);
return ret;
} finally {
state0.a = state0.b = 0;
}
};
imports.wbg.__wbg_new_405e22f390576ce2 = function() {
const ret = new Object();
return ret;
};
imports.wbg.__wbg_newnoargs_105ed471475aaf50 = function(arg0, arg1) {
const ret = new Function(getStringFromWasm0(arg0, arg1));
return ret;
};
imports.wbg.__wbg_newwithstrandinit_06c535e0a867c635 = function() { return handleError(function (arg0, arg1, arg2) {
const ret = new Request(getStringFromWasm0(arg0, arg1), arg2);
return ret;
}, arguments) };
imports.wbg.__wbg_queueMicrotask_97d92b4fcc8a61c5 = function(arg0) {
queueMicrotask(arg0);
};
imports.wbg.__wbg_queueMicrotask_d3219def82552485 = function(arg0) {
const ret = arg0.queueMicrotask;
return ret;
};
imports.wbg.__wbg_resolve_4851785c9c5f573d = function(arg0) {
const ret = Promise.resolve(arg0);
return ret;
};
imports.wbg.__wbg_set_11cd83f45504cedf = function() { return handleError(function (arg0, arg1, arg2, arg3, arg4) {
arg0.set(getStringFromWasm0(arg1, arg2), getStringFromWasm0(arg3, arg4));
}, arguments) };
imports.wbg.__wbg_setbody_5923b78a95eedf29 = function(arg0, arg1) {
arg0.body = arg1;
};
imports.wbg.__wbg_setheaders_834c0bdb6a8949ad = function(arg0, arg1) {
arg0.headers = arg1;
};
imports.wbg.__wbg_setmethod_3c5280fe5d890842 = function(arg0, arg1, arg2) {
arg0.method = getStringFromWasm0(arg1, arg2);
};
imports.wbg.__wbg_static_accessor_GLOBAL_88a902d13a557d07 = function() {
const ret = typeof global === 'undefined' ? null : global;
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
};
imports.wbg.__wbg_static_accessor_GLOBAL_THIS_56578be7e9f832b0 = function() {
const ret = typeof globalThis === 'undefined' ? null : globalThis;
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
};
imports.wbg.__wbg_static_accessor_SELF_37c5d418e4bf5819 = function() {
const ret = typeof self === 'undefined' ? null : self;
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
};
imports.wbg.__wbg_static_accessor_WINDOW_5de37043a91a9c40 = function() {
const ret = typeof window === 'undefined' ? null : window;
return isLikeNone(ret) ? 0 : addToExternrefTable0(ret);
};
imports.wbg.__wbg_status_f6360336ca686bf0 = function(arg0) {
const ret = arg0.status;
return ret;
};
imports.wbg.__wbg_text_7805bea50de2af49 = function() { return handleError(function (arg0) {
const ret = arg0.text();
return ret;
}, arguments) };
imports.wbg.__wbg_then_44b73946d2fb3e7d = function(arg0, arg1) {
const ret = arg0.then(arg1);
return ret;
};
imports.wbg.__wbg_then_48b406749878a531 = function(arg0, arg1, arg2) {
const ret = arg0.then(arg1, arg2);
return ret;
};
imports.wbg.__wbindgen_cb_drop = function(arg0) {
const obj = arg0.original;
if (obj.cnt-- == 1) {
obj.a = 0;
return true;
}
const ret = false;
return ret;
};
imports.wbg.__wbindgen_closure_wrapper648 = function(arg0, arg1, arg2) {
const ret = makeMutClosure(arg0, arg1, 109, __wbg_adapter_22);
return ret;
};
imports.wbg.__wbindgen_debug_string = function(arg0, arg1) {
const ret = debugString(arg1);
const ptr1 = passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
const len1 = WASM_VECTOR_LEN;
getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
};
imports.wbg.__wbindgen_init_externref_table = function() {
const table = wasm.__wbindgen_export_2;
const offset = table.grow(4);
table.set(0, undefined);
table.set(offset + 0, undefined);
table.set(offset + 1, null);
table.set(offset + 2, true);
table.set(offset + 3, false);
;
};
imports.wbg.__wbindgen_is_function = function(arg0) {
const ret = typeof(arg0) === 'function';
return ret;
};
imports.wbg.__wbindgen_is_undefined = function(arg0) {
const ret = arg0 === undefined;
return ret;
};
imports.wbg.__wbindgen_string_get = function(arg0, arg1) {
const obj = arg1;
const ret = typeof(obj) === 'string' ? obj : undefined;
var ptr1 = isLikeNone(ret) ? 0 : passStringToWasm0(ret, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc);
var len1 = WASM_VECTOR_LEN;
getDataViewMemory0().setInt32(arg0 + 4 * 1, len1, true);
getDataViewMemory0().setInt32(arg0 + 4 * 0, ptr1, true);
};
imports.wbg.__wbindgen_string_new = function(arg0, arg1) {
const ret = getStringFromWasm0(arg0, arg1);
return ret;
};
imports.wbg.__wbindgen_throw = function(arg0, arg1) {
throw new Error(getStringFromWasm0(arg0, arg1));
};
return imports;
}
function __wbg_init_memory(imports, memory) {
}
function __wbg_finalize_init(instance, module) {
wasm = instance.exports;
__wbg_init.__wbindgen_wasm_module = module;
cachedDataViewMemory0 = null;
cachedUint8ArrayMemory0 = null;
wasm.__wbindgen_start();
return wasm;
}
function initSync(module) {
if (wasm !== undefined) return wasm;
if (typeof module !== 'undefined') {
if (Object.getPrototypeOf(module) === Object.prototype) {
({module} = module)
} else {
console.warn('using deprecated parameters for `initSync()`; pass a single object instead')
}
}
const imports = __wbg_get_imports();
__wbg_init_memory(imports);
if (!(module instanceof WebAssembly.Module)) {
module = new WebAssembly.Module(module);
}
const instance = new WebAssembly.Instance(module, imports);
return __wbg_finalize_init(instance, module);
}
async function __wbg_init(module_or_path) {
if (wasm !== undefined) return wasm;
if (typeof module_or_path !== 'undefined') {
if (Object.getPrototypeOf(module_or_path) === Object.prototype) {
({module_or_path} = module_or_path)
} else {
console.warn('using deprecated parameters for the initialization function; pass a single object instead')
}
}
if (typeof module_or_path === 'undefined') {
module_or_path = new URL('forge_cli_wasm_bg.wasm', import.meta.url);
}
const imports = __wbg_get_imports();
if (typeof module_or_path === 'string' || (typeof Request === 'function' && module_or_path instanceof Request) || (typeof URL === 'function' && module_or_path instanceof URL)) {
module_or_path = fetch(module_or_path);
}
__wbg_init_memory(imports);
const { instance, module } = await __wbg_load(await module_or_path, imports);
return __wbg_finalize_init(instance, module);
}
export { initSync };
export default __wbg_init;

View File

@ -0,0 +1,15 @@
<!DOCTYPE html>
<html>
<head>
<title>Forge CLI (WASM)</title>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/xterm@5.3.0/css/xterm.css" />
<link rel="stylesheet" href="/cli-ui/style.css" />
<script src="https://cdn.jsdelivr.net/npm/xterm@5.3.0/lib/xterm.js"></script>
<script src="https://cdn.jsdelivr.net/npm/xterm-addon-fit@0.8.0/lib/xterm-addon-fit.js"></script>
</head>
<body>
<h1>Forge CLI (WASM Interface)</h1>
<div id="terminal"></div>
<script type="module" src="/cli-ui/main.js"></script>
</body>
</html>

View File

@ -0,0 +1,383 @@
import init, {
init_cli,
run_command_async,
get_completions,
get_history_item,
search_history
} from '/cli-ui/forge_cli_wasm.js';
async function main() {
// 1. Initialize xterm.js
const term = new Terminal({
cursorBlink: true,
theme: {
background: '#1e1e1e',
foreground: '#d4d4d4',
},
cols: 120, // Set a reasonable terminal width
scrollback: 1000,
convertEol: true, // Convert \n to \r\n for proper line endings
});
const fitAddon = new FitAddon.FitAddon();
term.loadAddon(fitAddon);
term.open(document.getElementById('terminal'));
fitAddon.fit();
window.addEventListener('resize', () => fitAddon.fit());
term.writeln('Welcome to the Forge CLI (WASM Interface)');
term.writeln('------------------------------------------');
term.writeln('');
try {
// 2. Load and initialize the WASM module
term.write('Loading WASM module...');
await init();
term.writeln('\r✅ WASM module loaded successfully.');
// 3. Fetch OpenAPI spec and initialize the CLI
        const baseUrl = window.location.origin; // derive the base URL dynamically
term.write(`Fetching OpenAPI spec from ${baseUrl}/api-docs/openapi.json...`);
const response = await fetch(`${baseUrl}/api-docs/openapi.json`);
if (!response.ok) {
throw new Error(`Failed to fetch spec: ${response.statusText}`);
}
const specJson = await response.text();
const spec = JSON.parse(specJson);
        // Save globally so the JS fallback can reuse them
window.__openapiSpec = spec;
window.__baseUrl = baseUrl;
init_cli(specJson, baseUrl);
term.writeln('\r✅ CLI initialized with OpenAPI spec.');
} catch (e) {
term.writeln(`\r\n❌ Error during initialization: ${e}`);
return;
}
// 4. Implement the REPL with enhanced functionality
let currentLine = '';
    let cursorPosition = 0; // cursor position within the current line
    let historyIndex = -1; // -1 = current input, >= 0 = history index
    let isInReverseSearch = false;
    let reverseSearchQuery = '';
    let completionMenu = null; // currently displayed completion menu
    const prompt = '\r\n$ ';
    const promptOnly = '$ '; // prompt without the newline, used for redraws
    // Redraw the current line
    function redrawLine() {
        // Move to the start of the line and clear everything after the prompt
        term.write('\r' + promptOnly);
        term.write('\x1b[K'); // clear from the cursor to the end of the line
        if (isInReverseSearch) {
            // In reverse-search mode, replace the whole prompt
            term.write('\r\x1b[K'); // clear the entire line
            term.write(`(reverse-i-search)'${reverseSearchQuery}': ${currentLine}`);
        } else {
            term.write(currentLine);
        }
        // Move the cursor to the correct position
        if (cursorPosition < currentLine.length) {
            const moveCursor = currentLine.length - cursorPosition;
            term.write('\x1b[' + moveCursor + 'D'); // move the cursor left
        }
    }
    // Insert a character at the current position
function insertChar(char) {
currentLine = currentLine.slice(0, cursorPosition) + char + currentLine.slice(cursorPosition);
cursorPosition++;
redrawLine();
}
    // Delete a character
function deleteChar() {
if (cursorPosition > 0) {
currentLine = currentLine.slice(0, cursorPosition - 1) + currentLine.slice(cursorPosition);
cursorPosition--;
redrawLine();
}
}
    // Move the cursor
function moveCursor(direction) {
if (direction === 'left' && cursorPosition > 0) {
cursorPosition--;
term.write('\x1b[D');
} else if (direction === 'right' && cursorPosition < currentLine.length) {
cursorPosition++;
term.write('\x1b[C');
}
}
    // Handle tab completion
function handleTabCompletion() {
try {
const completionResult = get_completions(currentLine, cursorPosition);
const suggestions = JSON.parse(completionResult.suggestions);
if (suggestions.length === 0) {
return;
}
if (suggestions.length === 1) {
                // Only one suggestion: complete it directly
const suggestion = suggestions[0];
const beforeCursor = currentLine.slice(0, suggestion.start_pos);
const afterCursor = currentLine.slice(suggestion.end_pos);
currentLine = beforeCursor + suggestion.value + afterCursor;
cursorPosition = beforeCursor.length + suggestion.value.length;
redrawLine();
} else {
                // Multiple suggestions: show a completion menu
term.writeln('');
suggestions.slice(0, 10).forEach(suggestion => {
const desc = suggestion.description ? ` - ${suggestion.description}` : '';
term.writeln(` ${suggestion.value}${desc}`);
});
redrawLine();
}
} catch (e) {
console.error('Tab completion error:', e);
}
}
    // Handle history navigation
function navigateHistory(direction) {
if (direction === 'up') {
const item = get_history_item(historyIndex + 1);
if (item) {
historyIndex++;
currentLine = item;
cursorPosition = currentLine.length;
redrawLine();
}
} else if (direction === 'down') {
if (historyIndex > 0) {
historyIndex--;
const item = get_history_item(historyIndex);
if (item) {
currentLine = item;
cursorPosition = currentLine.length;
redrawLine();
}
} else if (historyIndex === 0) {
historyIndex = -1;
currentLine = '';
cursorPosition = 0;
redrawLine();
}
}
}
    // Handle reverse search
function handleReverseSearch(char) {
if (char) {
reverseSearchQuery += char;
}
try {
const searchResults = JSON.parse(search_history(reverseSearchQuery));
if (searchResults.length > 0) {
currentLine = searchResults[0];
cursorPosition = currentLine.length;
}
redrawLine();
} catch (e) {
console.error('Reverse search error:', e);
}
}
    // Exit reverse-search mode
function exitReverseSearch() {
isInReverseSearch = false;
reverseSearchQuery = '';
cursorPosition = currentLine.length;
redrawLine();
}
    // JS fallback: when wasm reports "Path not found", execute the call directly in JS from the OpenAPI spec
async function executeCommandJS(commandLine) {
try {
const spec = window.__openapiSpec;
const baseUrl = window.__baseUrl || '';
if (!spec) return 'Error: OpenAPI spec not loaded.';
const tokens = commandLine.match(/(?:[^\s"]+|"[^"]*")+/g) || [];
if (tokens.length === 0) return '';
const cmd = tokens[0];
const args = {};
for (let i = 1; i < tokens.length; i++) {
const t = tokens[i];
if (t.startsWith('--')) {
const key = t.replace(/^--/, '');
const val = (i + 1 < tokens.length && !tokens[i + 1].startsWith('--')) ? tokens[++i] : '';
args[key] = val.replace(/^"|"$/g, '');
}
}
const parts = cmd.split('.');
const method = parts.pop().toUpperCase();
const cmdSegs = parts;
            // Match the path template
let matched = null;
for (const [key, item] of Object.entries(spec.paths || {})) {
const keySegs = key.split('/').filter(s => s);
if (keySegs.length !== cmdSegs.length) continue;
let ok = true;
for (let i = 0; i < keySegs.length; i++) {
const ks = keySegs[i];
const cs = cmdSegs[i];
const isParam = ks.startsWith('{') && ks.endsWith('}');
if (!isParam && ks !== cs) { ok = false; break; }
}
if (ok) { matched = [key, item]; break; }
}
if (!matched) {
return `API request failed (JS fallback): Path not found for /${cmdSegs.join('/')}`;
}
const [pathTemplate, pathItem] = matched;
const op = (pathItem[method.toLowerCase()]);
if (!op) return `API request failed (JS fallback): Operation not found for ${cmd}`;
            // Build the final path and query string
let finalPath = pathTemplate;
const used = new Set();
if (Array.isArray(op.parameters)) {
for (const p of op.parameters) {
                    // Skip $ref or malformed entries; only named inline parameters are handled
                    if (!p || !p.name) continue;
if (p.in === 'path' && args[p.name] != null) {
finalPath = finalPath.replace(`{${p.name}}`, encodeURIComponent(args[p.name]));
used.add(p.name);
}
}
}
const query = [];
for (const [k, v] of Object.entries(args)) {
if (!used.has(k)) query.push(`${encodeURIComponent(k)}=${encodeURIComponent(v)}`);
}
let serverUrl = '';
if (Array.isArray(spec.servers) && spec.servers.length > 0 && spec.servers[0].url) {
serverUrl = spec.servers[0].url;
}
const url = `${baseUrl}${serverUrl}${finalPath}${query.length ? ('?' + query.join('&')) : ''}`;
const resp = await fetch(url, { method });
const text = await resp.text();
try {
return JSON.stringify(JSON.parse(text), null, 2);
} catch {
return text;
}
} catch (e) {
return `API request failed (JS fallback): ${e}`;
}
}
term.write(prompt);
term.onKey(({ key, domEvent }) => {
const { keyCode, ctrlKey, altKey, metaKey } = domEvent;
        // Ctrl+R - reverse search
if (ctrlKey && keyCode === 82 && !isInReverseSearch) {
isInReverseSearch = true;
reverseSearchQuery = '';
currentLine = '';
cursorPosition = 0;
redrawLine();
return;
}
        // Handling while in reverse-search mode
        if (isInReverseSearch) {
            if (keyCode === 13) { // Enter - accept the search result
                exitReverseSearch();
                return;
            } else if (keyCode === 27) { // Esc - cancel the search
                isInReverseSearch = false;
                reverseSearchQuery = '';
                currentLine = '';
                cursorPosition = 0;
                redrawLine();
                return;
            } else if (keyCode === 8) { // Backspace - delete a search character
                if (reverseSearchQuery.length > 0) {
                    reverseSearchQuery = reverseSearchQuery.slice(0, -1);
                    handleReverseSearch();
                }
                return;
            } else if (!ctrlKey && !altKey && !metaKey && key.length === 1) {
                handleReverseSearch(key);
                return;
            }
            return;
        }
        // Handling in normal mode
        if (keyCode === 13) { // Enter - execute the command
if (currentLine.trim()) {
term.writeln('');
                // Execute the command asynchronously
(async () => {
try {
let result = await run_command_async(currentLine);
const plain = String(result);
if (plain.includes('Path not found for')) {
result = await executeCommandJS(currentLine);
}
                        // Strip ANSI escape sequences
const cleanResult = String(result)
.replace(/\x1b\[[0-9;]*m/g, '')
.replace(/\x1b\[[0-9]*[A-Za-z]/g, '')
.replace(/\[\d+m/g, '');
const lines = cleanResult.split('\n');
lines.forEach((line, index) => {
if (index === lines.length - 1 && line === '') {
return;
}
term.writeln(line);
});
} catch (error) {
term.writeln(`Error: ${error}`);
}
term.write(prompt);
})();
currentLine = '';
cursorPosition = 0;
historyIndex = -1;
} else {
term.write(prompt);
}
        } else if (keyCode === 9) { // Tab - completion
            domEvent.preventDefault();
            handleTabCompletion();
        } else if (keyCode === 8) { // Backspace
            deleteChar();
        } else if (keyCode === 37) { // Left arrow
            moveCursor('left');
        } else if (keyCode === 39) { // Right arrow
            moveCursor('right');
        } else if (keyCode === 38) { // Up arrow - previous history item
            navigateHistory('up');
        } else if (keyCode === 40) { // Down arrow - next history item
            navigateHistory('down');
        } else if (keyCode === 36) { // Home - move to the start of the line
            cursorPosition = 0;
            redrawLine();
        } else if (keyCode === 35) { // End - move to the end of the line
            cursorPosition = currentLine.length;
            redrawLine();
        } else if (!ctrlKey && !altKey && !metaKey && key.length === 1) {
            // Regular character input
insertChar(key);
}
});
}
main();

View File

@ -0,0 +1,12 @@
{
"name": "forge-cli-wasm",
"version": "0.1.2",
"files": [
"forge_cli_wasm_bg.wasm",
"forge_cli_wasm.js",
"forge_cli_wasm.d.ts"
],
"module": "forge_cli_wasm.js",
"types": "forge_cli_wasm.d.ts",
"sideEffects": false
}

View File

@ -0,0 +1,28 @@
body, html {
margin: 0;
padding: 0;
height: 100%;
background-color: #1e1e1e;
color: #d4d4d4;
font-family: Menlo, Monaco, 'Courier New', monospace;
}
h1 {
padding: 10px 20px;
margin: 0;
font-size: 1.2em;
border-bottom: 1px solid #333;
}
#terminal {
width: calc(100% - 40px);
height: calc(100vh - 80px); /* Adjust based on h1 height */
padding: 20px;
font-size: 14px; /* Ensure consistent font size */
line-height: 1.4; /* Better line spacing */
}
.xterm .xterm-viewport {
width: 100% !important;
}

View File

@ -0,0 +1,16 @@
# The port the server will listen on.
# If not set, defaults to 3000.
# Copy this file to .env and change the port if needed.
PORT=3001
HOST=0.0.0.0
# Local development database URL (TimescaleDB/PostgreSQL)
# Example below matches: docker run -p 15435:5432 -e POSTGRES_DB=fundamental -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres timescale/timescaledb:2.15.2-pg16
DATABASE_URL=postgres://postgres:postgres@127.0.0.1:15435/fundamental
# SQLx offline mode toggle. Set to true if you generated sqlx-data.json via `sqlx prepare`.
SQLX_OFFLINE=false
# Skip HTTP listening in tests/CI (off by default)
# When set to 1/true/yes/on, main will not bind the port or listen
# SKIP_SERVER_LISTEN=true
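
For reference, a minimal sketch of how these variables could be consumed at startup with `dotenvy` and `std::env` (variable names mirror this file; the service's actual `src/main.rs` is not shown in this diff, so treat this as illustrative rather than the real startup code):

```rust
// Illustrative only: read the env.sample variables the way a dotenvy-based
// binary could. The HOST/PORT defaults below match this file's comments.
use std::env;

fn main() {
    dotenvy::dotenv().ok(); // load .env if present; real env vars still win

    let host = env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
    let port: u16 = env::var("PORT")
        .ok()
        .and_then(|p| p.parse().ok())
        .unwrap_or(3000);
    let database_url =
        env::var("DATABASE_URL").expect("DATABASE_URL must be set (see env.sample)");

    println!("would listen on {host}:{port}, db = {database_url}");
}
```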

View File

@ -0,0 +1,94 @@
use crate::{
db,
dtos::{AnalysisResultDto, NewAnalysisResultDto},
AppState, ServerError,
};
use axum::{
extract::{Path, Query, State},
Json,
};
use serde::Deserialize;
use service_kit::api;
use uuid::Uuid;
use tracing::info;
#[derive(Deserialize, utoipa::ToSchema)]
pub struct AnalysisQuery {
pub symbol: String,
pub module_id: Option<String>,
}
#[api(POST, "/api/v1/analysis-results", output(detail = "AnalysisResultDto"))]
pub async fn create_analysis_result(
State(state): State<AppState>,
Json(payload): Json<NewAnalysisResultDto>,
) -> Result<Json<AnalysisResultDto>, ServerError> {
info!(target: "api", symbol = %payload.symbol, module_id = %payload.module_id, "POST /analysis-results → create_analysis_result called");
let new_result = db::create_analysis_result(&state.pool, &payload).await?;
// Convert model to DTO
let dto = AnalysisResultDto {
id: new_result.id,
symbol: new_result.symbol,
module_id: new_result.module_id,
generated_at: new_result.generated_at,
model_name: new_result.model_name,
content: new_result.content,
meta_data: new_result.meta_data,
};
info!(target: "api", id = %dto.id, symbol = %dto.symbol, module_id = %dto.module_id, "create_analysis_result completed");
Ok(Json(dto))
}
#[api(GET, "/api/v1/analysis-results", output(list = "AnalysisResultDto"))]
pub async fn get_analysis_results(
State(state): State<AppState>,
Query(query): Query<AnalysisQuery>,
) -> Result<Json<Vec<AnalysisResultDto>>, ServerError> {
info!(target: "api", symbol = %query.symbol, module_id = ?query.module_id, "GET /analysis-results → get_analysis_results called");
let results = db::get_analysis_results(&state.pool, &query.symbol, query.module_id.as_deref()).await?;
// Convert Vec<Model> to Vec<Dto>
let dtos: Vec<AnalysisResultDto> = results
.into_iter()
.map(|r| AnalysisResultDto {
id: r.id,
symbol: r.symbol,
module_id: r.module_id,
generated_at: r.generated_at,
model_name: r.model_name,
content: r.content,
meta_data: r.meta_data,
})
.collect();
info!(target: "api", count = dtos.len(), symbol = %query.symbol, "get_analysis_results completed");
Ok(Json(dtos))
}
#[api(GET, "/api/v1/analysis-results/{id}", output(detail = "AnalysisResultDto"))]
pub async fn get_analysis_result_by_id(
State(state): State<AppState>,
Path(id): Path<String>,
) -> Result<Json<AnalysisResultDto>, ServerError> {
let parsed = Uuid::parse_str(&id).map_err(|e| ServerError::Anyhow(e.into()))?;
info!(target: "api", id = %id, "GET /analysis-results/{{id}} → get_analysis_result_by_id called");
let result = db::get_analysis_result_by_id(&state.pool, parsed)
.await?
.ok_or_else(|| ServerError::NotFound(format!("Analysis result with id '{}' not found", id)))?;
// Convert model to DTO
let dto = AnalysisResultDto {
id: result.id,
symbol: result.symbol,
module_id: result.module_id,
generated_at: result.generated_at,
model_name: result.model_name,
content: result.content,
meta_data: result.meta_data,
};
info!(target: "api", id = %dto.id, symbol = %dto.symbol, module_id = %dto.module_id, "get_analysis_result_by_id completed");
Ok(Json(dto))
}
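
The field-by-field model→DTO copy above recurs in all three handlers in this file; a `From` impl would reduce each call site to `.into()`. A self-contained sketch of that pattern follows, with the field set abbreviated and types simplified so it compiles on its own (the real code would use `crate::models::AnalysisResult` and the DTO above; this refactor is a suggestion, not part of the commit):

```rust
// Sketch of centralizing the repeated model -> DTO mapping in a From impl.
// The structs here are simplified stand-ins, not the crate's actual types.
struct AnalysisResult { id: String, symbol: String, module_id: String, content: String }
struct AnalysisResultDto { id: String, symbol: String, module_id: String, content: String }

impl From<AnalysisResult> for AnalysisResultDto {
    fn from(r: AnalysisResult) -> Self {
        Self { id: r.id, symbol: r.symbol, module_id: r.module_id, content: r.content }
    }
}

fn main() {
    let model = AnalysisResult {
        id: "uuid-here".into(), // hypothetical value, for illustration only
        symbol: "600519.SH".into(),
        module_id: "fundamental".into(),
        content: "...".into(),
    };
    let dto: AnalysisResultDto = model.into(); // each handler site shrinks to this
    println!("{} / {}", dto.symbol, dto.module_id);
}
```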

View File

@ -0,0 +1,45 @@
use crate::{
db,
dtos::CompanyProfileDto,
AppState, ServerError,
};
use axum::{
extract::{Path, State},
Json,
};
use service_kit::api;
use tracing::info;
#[api(PUT, "/api/v1/companies")]
pub async fn upsert_company(
State(state): State<AppState>,
Json(payload): Json<CompanyProfileDto>,
) -> Result<(), ServerError> {
info!(target: "api", symbol = %payload.symbol, "PUT /companies → upsert_company called");
db::upsert_company(&state.pool, &payload).await?;
info!(target: "api", symbol = %payload.symbol, "upsert_company completed");
Ok(())
}
#[api(GET, "/api/v1/companies/{symbol}")]
pub async fn get_company_by_symbol(
State(state): State<AppState>,
Path(symbol): Path<String>,
) -> Result<Json<CompanyProfileDto>, ServerError> {
info!(target: "api", symbol = %symbol, "GET /companies/{{symbol}} → get_company_by_symbol called");
let company = db::get_company_by_symbol(&state.pool, &symbol)
.await?
.ok_or_else(|| ServerError::NotFound(format!("Company with symbol '{}' not found", symbol)))?;
// Convert from model to DTO
let dto = CompanyProfileDto {
symbol: company.symbol,
name: company.name,
industry: company.industry,
list_date: company.list_date,
additional_info: company.additional_info,
};
info!(target: "api", symbol = %dto.symbol, "get_company_by_symbol completed");
Ok(Json(dto))
}

View File

@ -0,0 +1,159 @@
use crate::{
db,
dtos::{DailyMarketDataDto, TimeSeriesFinancialDto, RealtimeQuoteDto},
AppState, ServerError,
};
use axum::{
extract::{Path, Query, State},
Json,
};
use chrono::NaiveDate;
use serde::Deserialize;
use service_kit::api;
use tracing::info;
#[derive(Deserialize, utoipa::ToSchema)]
pub struct FinancialsQuery {
metrics: Option<String>,
}
#[api(POST, "/api/v1/market-data/financials/batch")]
pub async fn batch_insert_financials(
State(state): State<AppState>,
Json(payload): Json<crate::dtos::TimeSeriesFinancialBatchDto>,
) -> Result<axum::http::StatusCode, ServerError> {
info!(target: "api", count = payload.records.len(), "POST /market-data/financials/batch → batch_insert_financials called");
db::batch_insert_financials(&state.pool, &payload.records).await?;
info!(target: "api", count = payload.records.len(), "batch_insert_financials completed");
Ok(axum::http::StatusCode::CREATED)
}
#[api(GET, "/api/v1/market-data/financials/{symbol}", output(list = "TimeSeriesFinancialDto"))]
pub async fn get_financials_by_symbol(
State(state): State<AppState>,
Path(symbol): Path<String>,
Query(query): Query<FinancialsQuery>,
) -> Result<Json<Vec<TimeSeriesFinancialDto>>, ServerError> {
info!(target: "api", symbol = %symbol, metrics = ?query.metrics, "GET /market-data/financials/{{symbol}} → get_financials_by_symbol called");
let metrics = query.metrics.map(|s| s.split(',').map(String::from).collect());
let financials = db::get_financials_by_symbol(&state.pool, &symbol, metrics).await?;
// Convert Vec<Model> to Vec<Dto>
let dtos: Vec<TimeSeriesFinancialDto> = financials
.into_iter()
.map(|f| TimeSeriesFinancialDto {
symbol: f.symbol,
metric_name: f.metric_name,
period_date: f.period_date,
value: f.value.try_into().unwrap_or(0.0), // Simplified conversion
source: f.source,
})
.collect();
info!(target: "api", symbol = %symbol, items = dtos.len(), "get_financials_by_symbol completed");
Ok(Json(dtos))
}
// =================================================================================
// Realtime Quotes
// =================================================================================
#[derive(Deserialize, utoipa::ToSchema)]
pub struct RealtimeQuery {
pub market: String,
pub max_age_seconds: Option<i64>,
}
#[api(POST, "/api/v1/market-data/quotes")]
pub async fn upsert_realtime_quote(
State(state): State<AppState>,
Json(quote): Json<RealtimeQuoteDto>,
) -> Result<axum::http::StatusCode, ServerError> {
info!(target: "api", symbol = %quote.symbol, market = %quote.market, "POST /market-data/quotes → upsert_realtime_quote called");
db::insert_realtime_quote(&state.pool, &quote).await?;
Ok(axum::http::StatusCode::CREATED)
}
#[api(GET, "/api/v1/market-data/quotes/{symbol}", output(detail = "RealtimeQuoteDto"))]
pub async fn get_latest_realtime_quote(
State(state): State<AppState>,
Path(symbol): Path<String>,
Query(q): Query<RealtimeQuery>,
) -> Result<Json<RealtimeQuoteDto>, ServerError> {
let market = q.market.clone();
info!(target: "api", symbol = %symbol, market = %market, "GET /market-data/quotes/{{market}}/{{symbol}} → get_latest_realtime_quote called");
if let Some(rec) = db::get_latest_realtime_quote(&state.pool, &market, &symbol).await? {
if let Some(max_age) = q.max_age_seconds {
let cutoff = chrono::Utc::now() - chrono::Duration::seconds(max_age);
if rec.ts < cutoff {
return Err(ServerError::NotFound("stale or missing quote".into()));
}
}
let dto = RealtimeQuoteDto {
symbol: rec.symbol,
market: rec.market,
ts: rec.ts,
price: rec.price.try_into().unwrap_or(0.0),
open_price: rec.open_price.and_then(|v| v.try_into().ok()),
high_price: rec.high_price.and_then(|v| v.try_into().ok()),
low_price: rec.low_price.and_then(|v| v.try_into().ok()),
prev_close: rec.prev_close.and_then(|v| v.try_into().ok()),
change: rec.change.and_then(|v| v.try_into().ok()),
change_percent: rec.change_percent.and_then(|v| v.try_into().ok()),
volume: rec.volume,
source: rec.source,
};
Ok(Json(dto))
} else {
Err(ServerError::NotFound("quote not found".into()))
}
}
#[derive(Deserialize, utoipa::ToSchema)]
pub struct DailyDataQuery {
start_date: Option<NaiveDate>,
end_date: Option<NaiveDate>,
}
#[api(POST, "/api/v1/market-data/daily/batch")]
pub async fn batch_insert_daily_data(
State(state): State<AppState>,
Json(payload): Json<crate::dtos::DailyMarketDataBatchDto>,
) -> Result<axum::http::StatusCode, ServerError> {
info!(target: "api", count = payload.records.len(), "POST /market-data/daily/batch → batch_insert_daily_data called");
db::batch_insert_daily_data(&state.pool, &payload.records).await?;
info!(target: "api", count = payload.records.len(), "batch_insert_daily_data completed");
Ok(axum::http::StatusCode::CREATED)
}
#[api(GET, "/api/v1/market-data/daily/{symbol}", output(list = "DailyMarketDataDto"))]
pub async fn get_daily_data_by_symbol(
State(state): State<AppState>,
Path(symbol): Path<String>,
Query(query): Query<DailyDataQuery>,
) -> Result<Json<Vec<DailyMarketDataDto>>, ServerError> {
info!(target: "api", symbol = %symbol, start = ?query.start_date, end = ?query.end_date, "GET /market-data/daily/{{symbol}} → get_daily_data_by_symbol called");
let daily_data =
db::get_daily_data_by_symbol(&state.pool, &symbol, query.start_date, query.end_date)
.await?;
// Convert Vec<Model> to Vec<Dto>
let dtos: Vec<DailyMarketDataDto> = daily_data
.into_iter()
.map(|d| DailyMarketDataDto {
symbol: d.symbol,
trade_date: d.trade_date,
open_price: d.open_price.and_then(|p| p.try_into().ok()),
high_price: d.high_price.and_then(|p| p.try_into().ok()),
low_price: d.low_price.and_then(|p| p.try_into().ok()),
close_price: d.close_price.and_then(|p| p.try_into().ok()),
volume: d.volume,
pe: d.pe.and_then(|p| p.try_into().ok()),
pb: d.pb.and_then(|p| p.try_into().ok()),
total_mv: d.total_mv.and_then(|p| p.try_into().ok()),
})
.collect();
info!(target: "api", symbol = %symbol, items = dtos.len(), "get_daily_data_by_symbol completed");
Ok(Json(dtos))
}
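
The `max_age_seconds` handling in `get_latest_realtime_quote` above is what enforces the strict-TTL behavior: a stored quote older than the cutoff is treated the same as a missing one (404, "stale or missing quote"). A standalone restatement of just that freshness predicate, using the same `chrono` arithmetic as the handler:

```rust
// Standalone sketch of the handler's staleness check: a quote is served only
// when its timestamp is within max_age_seconds of "now".
use chrono::{DateTime, Duration, Utc};

fn is_fresh(ts: DateTime<Utc>, max_age_seconds: i64) -> bool {
    let cutoff = Utc::now() - Duration::seconds(max_age_seconds);
    ts >= cutoff // `ts < cutoff` is the handler's "stale or missing quote" case
}

fn main() {
    let five_seconds_old = Utc::now() - Duration::seconds(5);
    let two_minutes_old = Utc::now() - Duration::seconds(120);
    assert!(is_fresh(five_seconds_old, 30)); // within a 30s TTL
    assert!(!is_fresh(two_minutes_old, 30)); // older than the TTL -> rejected
    println!("TTL check behaves as in the handler");
}
```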

View File

@ -0,0 +1,5 @@
// This module will contain all the API handler definitions
// which are then collected by the `inventory` crate.
pub mod companies;
pub mod market_data;
pub mod analysis;

View File

@ -0,0 +1,3 @@
fn main() {
eprintln!("This project uses 'cargo forge api-cli' provided by service_kit.\nRun: cargo forge api-cli --url <URL> [subcommand]");
}

View File

@ -0,0 +1,341 @@
// This module contains all the database interaction logic,
// using `sqlx` to query the PostgreSQL database.
//
// Functions in this module will be called by the API handlers
// to fetch or store data.
use crate::{
dtos::{CompanyProfileDto, DailyMarketDataDto, NewAnalysisResultDto, TimeSeriesFinancialDto, RealtimeQuoteDto},
models::{AnalysisResult, CompanyProfile, DailyMarketData, TimeSeriesFinancial, RealtimeQuote},
};
use anyhow::Result;
use sqlx::PgPool;
use rust_decimal::Decimal;
use chrono::NaiveDate;
use tracing::info;
/// Upserts a company profile into the database.
/// If a company with the same symbol already exists, it will be updated.
/// Otherwise, a new record will be inserted.
pub async fn upsert_company(pool: &PgPool, company: &CompanyProfileDto) -> Result<()> {
info!(target: "db", symbol = %company.symbol, "DB upsert_company started");
sqlx::query!(
r#"
INSERT INTO company_profiles (symbol, name, industry, list_date, additional_info, updated_at)
VALUES ($1, $2, $3, $4, $5, NOW())
ON CONFLICT (symbol) DO UPDATE SET
name = EXCLUDED.name,
industry = EXCLUDED.industry,
list_date = EXCLUDED.list_date,
additional_info = EXCLUDED.additional_info,
updated_at = NOW()
"#,
company.symbol,
company.name,
company.industry,
company.list_date,
company.additional_info,
)
.execute(pool)
.await?;
info!(target: "db", symbol = %company.symbol, "DB upsert_company finished");
Ok(())
}
/// Fetches a single company profile by its symbol.
pub async fn get_company_by_symbol(pool: &PgPool, symbol: &str) -> Result<Option<CompanyProfile>> {
info!(target: "db", symbol = %symbol, "DB get_company_by_symbol started");
let company = sqlx::query_as!(
CompanyProfile,
r#"
SELECT symbol, name, industry, list_date, additional_info, updated_at
FROM company_profiles
WHERE symbol = $1
"#,
symbol
)
.fetch_optional(pool)
.await?;
info!(target: "db", symbol = %symbol, found = company.is_some(), "DB get_company_by_symbol finished");
Ok(company)
}
// =================================================================================
// Market Data Functions (Task T3.2)
// =================================================================================
pub async fn batch_insert_financials(pool: &PgPool, financials: &[TimeSeriesFinancialDto]) -> Result<()> {
info!(target: "db", count = financials.len(), "DB batch_insert_financials started");
// Note: This is a simple iterative approach. For very high throughput,
// a single COPY statement or sqlx's `copy` module would be more performant.
for financial in financials {
sqlx::query!(
r#"
INSERT INTO time_series_financials (symbol, metric_name, period_date, value, source)
VALUES ($1, $2, $3, $4, $5)
ON CONFLICT (symbol, metric_name, period_date) DO UPDATE SET
value = EXCLUDED.value,
source = EXCLUDED.source
"#,
financial.symbol,
financial.metric_name,
financial.period_date,
Decimal::from_f64_retain(financial.value).expect("invalid decimal conversion from f64"), // Convert f64 to Decimal
financial.source,
)
.execute(pool)
.await?;
}
info!(target: "db", count = financials.len(), "DB batch_insert_financials finished");
Ok(())
}
pub async fn get_financials_by_symbol(
pool: &PgPool,
symbol: &str,
metrics: Option<Vec<String>>,
) -> Result<Vec<TimeSeriesFinancial>> {
info!(target: "db", symbol = %symbol, has_metrics = metrics.as_ref().map(|m| !m.is_empty()).unwrap_or(false), "DB get_financials_by_symbol started");
let results = if let Some(metrics) = metrics {
sqlx::query_as!(
TimeSeriesFinancial,
r#"
SELECT symbol, metric_name, period_date, value, source
FROM time_series_financials
WHERE symbol = $1 AND metric_name = ANY($2)
ORDER BY period_date DESC
"#,
symbol,
&metrics
)
.fetch_all(pool)
.await?
} else {
sqlx::query_as!(
TimeSeriesFinancial,
r#"
SELECT symbol, metric_name, period_date, value, source
FROM time_series_financials
WHERE symbol = $1
ORDER BY period_date DESC
"#,
symbol
)
.fetch_all(pool)
.await?
};
info!(target: "db", symbol = %symbol, items = results.len(), "DB get_financials_by_symbol finished");
Ok(results)
}
pub async fn batch_insert_daily_data(pool: &PgPool, daily_data: &[DailyMarketDataDto]) -> Result<()> {
info!(target: "db", count = daily_data.len(), "DB batch_insert_daily_data started");
for data in daily_data {
sqlx::query!(
r#"
INSERT INTO daily_market_data (symbol, trade_date, open_price, high_price, low_price, close_price, volume, pe, pb, total_mv)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
ON CONFLICT (symbol, trade_date) DO UPDATE SET
open_price = EXCLUDED.open_price,
high_price = EXCLUDED.high_price,
low_price = EXCLUDED.low_price,
close_price = EXCLUDED.close_price,
volume = EXCLUDED.volume,
pe = EXCLUDED.pe,
pb = EXCLUDED.pb,
total_mv = EXCLUDED.total_mv
"#,
data.symbol,
data.trade_date,
data.open_price.and_then(Decimal::from_f64_retain),
data.high_price.and_then(Decimal::from_f64_retain),
data.low_price.and_then(Decimal::from_f64_retain),
data.close_price.and_then(Decimal::from_f64_retain),
data.volume,
data.pe.and_then(Decimal::from_f64_retain),
data.pb.and_then(Decimal::from_f64_retain),
data.total_mv.and_then(Decimal::from_f64_retain),
)
.execute(pool)
.await?;
}
info!(target: "db", count = daily_data.len(), "DB batch_insert_daily_data finished");
Ok(())
}
pub async fn get_daily_data_by_symbol(
pool: &PgPool,
symbol: &str,
start_date: Option<NaiveDate>,
end_date: Option<NaiveDate>,
) -> Result<Vec<DailyMarketData>> {
// This query is simplified. A real-world scenario might need more complex date filtering.
info!(target: "db", symbol = %symbol, start = ?start_date, end = ?end_date, "DB get_daily_data_by_symbol started");
let daily_data = sqlx::query_as!(
DailyMarketData,
r#"
SELECT symbol, trade_date, open_price, high_price, low_price, close_price, volume, pe, pb, total_mv
FROM daily_market_data
WHERE symbol = $1
AND ($2::DATE IS NULL OR trade_date >= $2)
AND ($3::DATE IS NULL OR trade_date <= $3)
ORDER BY trade_date DESC
"#,
symbol,
start_date,
end_date
)
.fetch_all(pool)
.await?;
info!(target: "db", symbol = %symbol, items = daily_data.len(), "DB get_daily_data_by_symbol finished");
Ok(daily_data)
}
// =================================================================================
// Realtime Quotes Functions
// =================================================================================
pub async fn insert_realtime_quote(pool: &PgPool, quote: &RealtimeQuoteDto) -> Result<()> {
info!(target: "db", symbol = %quote.symbol, market = %quote.market, ts = %quote.ts, "DB insert_realtime_quote started");
sqlx::query!(
r#"
INSERT INTO realtime_quotes (
symbol, market, ts, price, open_price, high_price, low_price, prev_close, change, change_percent, volume, source, updated_at
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, NOW()
)
ON CONFLICT (symbol, market, ts) DO UPDATE SET
price = EXCLUDED.price,
open_price = EXCLUDED.open_price,
high_price = EXCLUDED.high_price,
low_price = EXCLUDED.low_price,
prev_close = EXCLUDED.prev_close,
change = EXCLUDED.change,
change_percent = EXCLUDED.change_percent,
volume = EXCLUDED.volume,
source = EXCLUDED.source,
updated_at = NOW()
"#,
quote.symbol,
quote.market,
quote.ts,
Decimal::from_f64_retain(quote.price).expect("invalid price"),
quote.open_price.and_then(Decimal::from_f64_retain),
quote.high_price.and_then(Decimal::from_f64_retain),
quote.low_price.and_then(Decimal::from_f64_retain),
quote.prev_close.and_then(Decimal::from_f64_retain),
quote.change.and_then(Decimal::from_f64_retain),
quote.change_percent.and_then(Decimal::from_f64_retain),
quote.volume,
quote.source.as_deref(),
)
.execute(pool)
.await?;
info!(target: "db", symbol = %quote.symbol, market = %quote.market, "DB insert_realtime_quote finished");
Ok(())
}
pub async fn get_latest_realtime_quote(pool: &PgPool, market: &str, symbol: &str) -> Result<Option<RealtimeQuote>> {
info!(target: "db", symbol = %symbol, market = %market, "DB get_latest_realtime_quote started");
let rec = sqlx::query_as!(
RealtimeQuote,
r#"
SELECT symbol, market, ts, price, open_price, high_price, low_price, prev_close, change, change_percent, volume, source, updated_at
FROM realtime_quotes
WHERE symbol = $1 AND market = $2
ORDER BY ts DESC
LIMIT 1
"#,
symbol,
market
)
.fetch_optional(pool)
.await?;
info!(target: "db", symbol = %symbol, market = %market, found = rec.is_some(), "DB get_latest_realtime_quote finished");
Ok(rec)
}
// =================================================================================
// Analysis Results Functions (Task T3.3)
// =================================================================================
pub async fn create_analysis_result(pool: &PgPool, result: &NewAnalysisResultDto) -> Result<AnalysisResult> {
info!(target: "db", symbol = %result.symbol, module_id = %result.module_id, "DB create_analysis_result started");
let new_result = sqlx::query_as!(
AnalysisResult,
r#"
INSERT INTO analysis_results (symbol, module_id, model_name, content, meta_data)
VALUES ($1, $2, $3, $4, $5)
RETURNING id, symbol, module_id, generated_at, model_name, content, meta_data
"#,
result.symbol,
result.module_id,
result.model_name,
result.content,
result.meta_data,
)
.fetch_one(pool)
.await?;
info!(target: "db", id = %new_result.id, symbol = %new_result.symbol, module_id = %new_result.module_id, "DB create_analysis_result finished");
Ok(new_result)
}
pub async fn get_analysis_results(
pool: &PgPool,
symbol: &str,
module_id: Option<&str>,
) -> Result<Vec<AnalysisResult>> {
info!(target: "db", symbol = %symbol, module = ?module_id, "DB get_analysis_results started");
let results = if let Some(module) = module_id {
sqlx::query_as!(
AnalysisResult,
r#"
SELECT id, symbol, module_id, generated_at, model_name, content, meta_data
FROM analysis_results
WHERE symbol = $1 AND module_id = $2
ORDER BY generated_at DESC
"#,
symbol,
module
)
.fetch_all(pool)
.await?
} else {
sqlx::query_as!(
AnalysisResult,
r#"
SELECT id, symbol, module_id, generated_at, model_name, content, meta_data
FROM analysis_results
WHERE symbol = $1
ORDER BY generated_at DESC
"#,
symbol
)
.fetch_all(pool)
.await?
};
info!(target: "db", symbol = %symbol, items = results.len(), "DB get_analysis_results finished");
Ok(results)
}
pub async fn get_analysis_result_by_id(pool: &PgPool, id: uuid::Uuid) -> Result<Option<AnalysisResult>> {
info!(target: "db", id = %id, "DB get_analysis_result_by_id started");
let result = sqlx::query_as!(
AnalysisResult,
r#"
SELECT id, symbol, module_id, generated_at, model_name, content, meta_data
FROM analysis_results
WHERE id = $1
"#,
id
)
.fetch_optional(pool)
.await?;
info!(target: "db", id = %id, found = result.is_some(), "DB get_analysis_result_by_id finished");
Ok(result)
}
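
One sharp edge in the write paths above: `Decimal::from_f64_retain(...).expect(...)` panics if a caller ever sends NaN or infinity, since `from_f64_retain` returns `Option`. A small sketch of a fallible alternative that would surface such inputs as an error instead, using the `rust_decimal` and `anyhow` crates already in Cargo.toml (whether to adopt this is a design choice, not part of this commit):

```rust
// Sketch: fallible f64 -> Decimal conversion, so a NaN/inf value becomes a
// reportable error rather than a panic inside a request handler.
use rust_decimal::Decimal;

fn to_decimal(field: &str, v: f64) -> anyhow::Result<Decimal> {
    Decimal::from_f64_retain(v)
        .ok_or_else(|| anyhow::anyhow!("{field}: {v} cannot be represented as Decimal"))
}

fn main() -> anyhow::Result<()> {
    println!("price = {}", to_decimal("price", 1730.55)?); // Ok
    assert!(to_decimal("price", f64::NAN).is_err()); // NaN is rejected, no panic
    Ok(())
}
```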

View File

@ -0,0 +1,99 @@
use chrono::NaiveDate;
use service_kit::api_dto;
use serde_json::Value as JsonValue;
use uuid::Uuid;
// =================================================================================
// Companies API DTOs (Task T3.1)
// =================================================================================
#[api_dto]
pub struct CompanyProfileDto {
pub symbol: String,
pub name: String,
pub industry: Option<String>,
pub list_date: Option<NaiveDate>,
pub additional_info: Option<JsonValue>,
}
// =================================================================================
// Market Data API DTOs (Task T3.2)
// =================================================================================
#[api_dto]
pub struct TimeSeriesFinancialDto {
pub symbol: String,
pub metric_name: String,
pub period_date: NaiveDate,
pub value: f64, // f64 keeps the DTO simple; the DB layer handles this as NUMERIC via rust_decimal::Decimal
pub source: Option<String>,
}
#[api_dto]
pub struct DailyMarketDataDto {
pub symbol: String,
pub trade_date: NaiveDate,
pub open_price: Option<f64>,
pub high_price: Option<f64>,
pub low_price: Option<f64>,
pub close_price: Option<f64>,
pub volume: Option<i64>,
pub pe: Option<f64>,
pub pb: Option<f64>,
pub total_mv: Option<f64>,
}
// Batch DTOs to satisfy #[api] macro restriction on Json<Vec<T>> in request bodies
#[api_dto]
pub struct TimeSeriesFinancialBatchDto {
pub records: Vec<TimeSeriesFinancialDto>,
}
#[api_dto]
pub struct DailyMarketDataBatchDto {
pub records: Vec<DailyMarketDataDto>,
}
// =================================================================================
// Analysis Results API DTOs (Task T3.3)
// =================================================================================
#[api_dto]
pub struct NewAnalysisResultDto {
pub symbol: String,
pub module_id: String,
pub model_name: Option<String>,
pub content: String,
pub meta_data: Option<JsonValue>,
}
#[api_dto]
pub struct AnalysisResultDto {
pub id: Uuid,
pub symbol: String,
pub module_id: String,
pub generated_at: chrono::DateTime<chrono::Utc>,
pub model_name: Option<String>,
pub content: String,
pub meta_data: Option<JsonValue>,
}
// =================================================================================
// Realtime Quotes DTOs
// =================================================================================
#[api_dto]
pub struct RealtimeQuoteDto {
pub symbol: String,
pub market: String,
pub ts: chrono::DateTime<chrono::Utc>,
pub price: f64,
pub open_price: Option<f64>,
pub high_price: Option<f64>,
pub low_price: Option<f64>,
pub prev_close: Option<f64>,
pub change: Option<f64>,
pub change_percent: Option<f64>,
pub volume: Option<i64>,
pub source: Option<String>,
}
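// Illustrative sketch (values and the market code are assumptions, not from the
// commit): constructing the body for POST /api/v1/market-data/quotes. `#[api_dto]`
// is assumed to derive serde Serialize/Deserialize, so this struct maps 1:1 onto
// the JSON request payload.
#[allow(dead_code)]
fn example_realtime_quote() -> RealtimeQuoteDto {
    RealtimeQuoteDto {
        symbol: "600519.SH".to_string(),   // hypothetical symbol
        market: "china".to_string(),       // hypothetical market code
        ts: chrono::Utc::now(),
        price: 1700.5,
        open_price: Some(1690.0),
        high_price: Some(1705.0),
        low_price: Some(1688.0),
        prev_close: Some(1695.0),
        change: Some(5.5),
        change_percent: Some(0.32),
        volume: Some(1_234_567),
        source: Some("tushare".to_string()),
    }
}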

View File

@ -0,0 +1,9 @@
// This file will contain the Axum handlers for our API endpoints.
//
// Each handler will correspond to a specific route and HTTP method,
// and will be annotated with the `#[api]` macro from `service_kit`
// to automatically generate the OpenAPI specification.
/// A dummy function to ensure the linker includes this module,
/// allowing the `inventory` crate to discover API handlers.
pub fn load() {}

View File

@ -0,0 +1,112 @@
use axum::Router;
use service_kit::rest_router_builder::RestRouterBuilder;
use rmcp::transport::streamable_http_server::{session::local::LocalSessionManager, StreamableHttpService};
use rust_embed::RustEmbed;
use tower_http::cors::{Any, CorsLayer};
use utoipa::openapi::OpenApi;
use thiserror::Error;
use anyhow::Error as AnyhowError;
use axum::http::StatusCode;
use sqlx::PgPool;
use axum::response::IntoResponse;
use axum::Json;
#[cfg(feature = "swagger-ui")]
use utoipa_swagger_ui::SwaggerUi;
#[cfg(feature = "wasm-cli")]
use axum_embed::ServeEmbed;
pub mod dtos;
pub mod handlers;
pub mod mcp_server;
pub mod models;
pub mod db;
pub mod api;
#[cfg(feature = "wasm-cli")]
#[derive(RustEmbed, Clone)]
#[folder = "assets/"]
struct Assets;
pub fn build_openapi_spec() -> OpenApi {
// The version comes from Cargo.toml via CARGO_PKG_VERSION, so the spec stays in sync with the crate.
service_kit::openapi_utils::build_openapi_basic("data-persistence-service", env!("CARGO_PKG_VERSION"), "data-persistence-service API", "App")
}
/// Registers handlers only, so the `inventory` of API routes is complete.
pub fn load() { handlers::load(); }
/// Builds the REST router (does not start a service or bind a port).
pub fn build_rest_router(openapi: OpenApi) -> service_kit::error::Result<Router> {
RestRouterBuilder::new().openapi(openapi).build()
}
/// Builds the REST router with application state (convenient for tests).
pub fn build_rest_router_with_state(openapi: OpenApi, state: crate::AppState) -> service_kit::error::Result<Router> {
RestRouterBuilder::new()
.openapi(openapi)
.with_state(state)
.build()
}
/// Builds the Swagger UI (the caller merges it into the app themselves).
#[cfg(feature = "swagger-ui")]
pub fn build_swagger_ui(openapi: OpenApi) -> SwaggerUi {
SwaggerUi::new("/swagger-ui").url("/api-docs/openapi.json", openapi)
}
/// Builds the CLI WASM assets router (/cli-ui).
#[cfg(feature = "wasm-cli")]
pub fn build_cli_assets_router() -> Router {
Router::new().nest_service("/cli-ui", ServeEmbed::<Assets>::new())
}
/// Builds a commonly used CORS layer (optional).
pub fn default_cors_layer() -> CorsLayer {
CorsLayer::new().allow_origin(Any).allow_methods(Any).allow_headers(Any)
}
/// Builds the MCP tool service (requires the `mcp` feature).
#[cfg(feature = "mcp")]
pub fn build_mcp_service(openapi: OpenApi) -> service_kit::error::Result<StreamableHttpService<mcp_server::McpServerImpl>> {
let mcp_tool_router = service_kit::bootstrap::mcp_router_from_openapi::<mcp_server::McpServerImpl>(openapi)?;
let mcp_server = mcp_server::McpServerImpl::new(mcp_tool_router);
let svc = StreamableHttpService::new(
move || Ok(mcp_server.clone()),
LocalSessionManager::default().into(),
Default::default(),
);
Ok(svc)
}
// ==== Shared App State & Error types (used by both lib and bin) ====
#[derive(Clone)]
pub struct AppState {
pub(crate) pool: PgPool,
}
impl AppState {
pub fn new(pool: PgPool) -> Self { Self { pool } }
pub fn pool(&self) -> &PgPool { &self.pool }
}
#[derive(Debug, Error)]
pub enum ServerError {
#[error("An internal server error occurred")]
Anyhow(#[from] AnyhowError),
#[error("Not found: {0}")]
NotFound(String),
}
impl IntoResponse for ServerError {
fn into_response(self) -> axum::response::Response {
let (status, error_message) = match self {
ServerError::Anyhow(e) => {
eprintln!("Internal server error: {:?}", e);
(StatusCode::INTERNAL_SERVER_ERROR, "Internal server error".to_string())
}
ServerError::NotFound(message) => (StatusCode::NOT_FOUND, message),
};
let body = Json(serde_json::json!({ "error": error_message }));
(status, body).into_response()
}
}
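// Illustrative sketch (an assumption, not part of the commit): a handler built on
// `AppState` and `ServerError`, where `?` on an anyhow-compatible error maps to
// 500 and a missing row maps to 404 via `ServerError::NotFound`.
#[allow(dead_code)]
async fn example_lookup(state: AppState) -> Result<Json<serde_json::Value>, ServerError> {
    // Any sqlx error is lifted into anyhow, which `?` converts to ServerError::Anyhow (500).
    let row: Option<i64> = sqlx::query_scalar("SELECT 1")
        .fetch_optional(state.pool())
        .await
        .map_err(AnyhowError::from)?;
    // An absent row becomes an explicit 404 instead of a generic failure.
    row.map(|v| Json(serde_json::json!({ "value": v })))
        .ok_or_else(|| ServerError::NotFound("no row".to_string()))
}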

View File

@ -0,0 +1,73 @@
use data_persistence_service as app;
use axum::Router;
use sqlx::PgPool;
use tracing_subscriber::{EnvFilter, fmt::SubscriberBuilder};
use tower_http::trace::TraceLayer;
#[tokio::main]
pub async fn main() {
dotenvy::dotenv().ok();
app::load(); // This single function now ensures all handlers are linked.
// Initialize tracing (defaults to INFO; can be overridden via RUST_LOG)
let _ = SubscriberBuilder::default()
.with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")))
.with_target(true)
.compact()
.try_init();
let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
let pool = PgPool::connect(&db_url).await.expect("Failed to connect to database");
let state = app::AppState::new(pool);
let openapi = app::build_openapi_spec();
// Use RestRouterBuilder and inject global state (stored internally as Any, so the Router needs no generic state parameter)
let rest: Router = app::build_rest_router_with_state(openapi.clone(), state.clone())
.expect("build rest router");
#[cfg(feature = "swagger-ui")]
let swagger = app::build_swagger_ui(openapi.clone());
#[cfg(feature = "wasm-cli")]
let cli_assets = app::build_cli_assets_router();
let mut app_router: Router = Router::new()
.merge(rest)
.layer(app::default_cors_layer())
.layer(TraceLayer::new_for_http());
#[cfg(feature = "swagger-ui")]
{
app_router = app_router.merge(swagger);
}
#[cfg(feature = "wasm-cli")]
{
app_router = app_router.merge(cli_assets);
}
#[cfg(feature = "mcp")]
{
let mcp = app::build_mcp_service(openapi.clone()).expect("build mcp service");
app_router = app_router.nest_service("/mcp", mcp);
}
let address =
std::env::var("HOST").unwrap_or_else(|_| "0.0.0.0".into()) + ":" + &std::env::var("PORT").unwrap_or_else(|_| "3000".into());
println!("🚀 Server running at http://{}", address);
println!("📚 Swagger UI available at http://{}/swagger-ui", address);
println!("💻 Forge CLI UI available at http://{}/cli-ui", address);
let skip_listen = std::env::var("SKIP_SERVER_LISTEN")
.map(|v| {
let v = v.to_ascii_lowercase();
v == "1" || v == "true" || v == "yes" || v == "on"
})
.unwrap_or(false);
if skip_listen {
println!("🧪 SKIP_SERVER_LISTEN=1 → 跳过启动监听,仅用于测试/构建环境。");
return;
}
let listener = tokio::net::TcpListener::bind(&address).await.unwrap();
axum::serve(listener, app_router).await.unwrap();
}

View File

@ -0,0 +1,33 @@
use rmcp::{
handler::server::router::tool::ToolRouter, tool_handler, ServerHandler,
};
use rmcp::model::*;
/// A generic MCP Server implementation that holds a dynamically built ToolRouter.
#[derive(Clone)]
pub struct McpServerImpl {
tool_router: ToolRouter<McpServerImpl>,
}
impl McpServerImpl {
/// Creates a new McpServerImpl with a pre-built ToolRouter.
pub fn new(tool_router: ToolRouter<McpServerImpl>) -> Self {
Self { tool_router }
}
}
#[tool_handler]
impl ServerHandler for McpServerImpl {
fn get_info(&self) -> ServerInfo {
ServerInfo {
protocol_version: ProtocolVersion::V_2024_11_05,
capabilities: ServerCapabilities::builder()
.enable_tools()
.build(),
server_info: Implementation::from_build_env(),
instructions: Some(
"This is a service generated from service-template.".to_string(),
),
}
}
}

View File

@ -0,0 +1,87 @@
use chrono::{DateTime, NaiveDate, Utc};
use serde_json::Value as JsonValue;
use sqlx::FromRow;
use uuid::Uuid;
#[derive(Debug, Clone, FromRow)]
pub struct CompanyProfile {
pub symbol: String,
pub name: String,
pub industry: Option<String>,
pub list_date: Option<NaiveDate>,
pub additional_info: Option<JsonValue>,
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Clone, FromRow)]
pub struct TimeSeriesFinancial {
pub symbol: String,
pub metric_name: String,
pub period_date: NaiveDate,
pub value: rust_decimal::Decimal, // Using Decimal for precision with NUMERIC
pub source: Option<String>,
}
#[derive(Debug, Clone, FromRow)]
pub struct DailyMarketData {
pub symbol: String,
pub trade_date: NaiveDate,
pub open_price: Option<rust_decimal::Decimal>,
pub high_price: Option<rust_decimal::Decimal>,
pub low_price: Option<rust_decimal::Decimal>,
pub close_price: Option<rust_decimal::Decimal>,
pub volume: Option<i64>,
pub pe: Option<rust_decimal::Decimal>,
pub pb: Option<rust_decimal::Decimal>,
pub total_mv: Option<rust_decimal::Decimal>,
}
#[derive(Debug, Clone, FromRow)]
pub struct AnalysisResult {
pub id: Uuid,
pub symbol: String,
pub module_id: String,
pub generated_at: DateTime<Utc>,
pub model_name: Option<String>,
pub content: String,
pub meta_data: Option<JsonValue>,
}
#[derive(Debug, Clone, FromRow)]
pub struct SystemConfig {
pub config_key: String,
pub config_value: JsonValue,
pub description: Option<String>,
pub updated_at: DateTime<Utc>,
}
#[derive(Debug, Clone, FromRow)]
pub struct ExecutionLog {
pub id: i64,
pub report_id: Uuid,
pub step_name: String,
pub status: String,
pub start_time: DateTime<Utc>,
pub end_time: Option<DateTime<Utc>>,
pub duration_ms: Option<i32>,
pub token_usage: Option<JsonValue>,
pub error_message: Option<String>,
pub log_details: Option<JsonValue>,
}
#[derive(Debug, Clone, FromRow)]
pub struct RealtimeQuote {
pub symbol: String,
pub market: String,
pub ts: DateTime<Utc>,
pub price: rust_decimal::Decimal,
pub open_price: Option<rust_decimal::Decimal>,
pub high_price: Option<rust_decimal::Decimal>,
pub low_price: Option<rust_decimal::Decimal>,
pub prev_close: Option<rust_decimal::Decimal>,
pub change: Option<rust_decimal::Decimal>,
pub change_percent: Option<rust_decimal::Decimal>,
pub volume: Option<i64>,
pub source: Option<String>,
pub updated_at: DateTime<Utc>,
}
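// Illustrative sketch (not part of the commit): mapping the NUMERIC-backed DB
// model onto the f64-based API DTO. `to_f64` comes from rust_decimal's
// `ToPrimitive`; the conversion is lossy but sufficient for display purposes.
impl RealtimeQuote {
    #[allow(dead_code)]
    pub fn to_dto_sketch(&self) -> crate::dtos::RealtimeQuoteDto {
        use rust_decimal::prelude::ToPrimitive;
        // Non-capturing closure, reusable for every optional Decimal field.
        let f = |d: &rust_decimal::Decimal| d.to_f64().unwrap_or_default();
        crate::dtos::RealtimeQuoteDto {
            symbol: self.symbol.clone(),
            market: self.market.clone(),
            ts: self.ts,
            price: f(&self.price),
            open_price: self.open_price.as_ref().map(f),
            high_price: self.high_price.as_ref().map(f),
            low_price: self.low_price.as_ref().map(f),
            prev_close: self.prev_close.as_ref().map(f),
            change: self.change.as_ref().map(f),
            change_percent: self.change_percent.as_ref().map(f),
            volume: self.volume,
            source: self.source.clone(),
        }
    }
}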

View File

@ -0,0 +1,226 @@
#![allow(unused_imports)]
use axum::{
body::Body,
http::{self, Request, StatusCode},
response::Response,
};
use data_persistence_service::{
self as app,
dtos::{
AnalysisResultDto, CompanyProfileDto, DailyMarketDataBatchDto, DailyMarketDataDto,
NewAnalysisResultDto, TimeSeriesFinancialBatchDto, TimeSeriesFinancialDto,
},
AppState,
};
use http_body_util::BodyExt;
use sqlx::PgPool;
use tower::util::ServiceExt; // for `oneshot`
// Note: `build_rest_router_with_state` and `AppState` are exported from lib.rs,
// which lets these tests drive the full router without starting a server.
#[sqlx::test]
async fn test_api_upsert_and_get_company(pool: PgPool) {
let state = AppState::new(pool);
let openapi = app::build_openapi_spec();
let app = app::build_rest_router_with_state(openapi, state).unwrap();
// 1. Act: Upsert a new company
let new_company = CompanyProfileDto {
symbol: "API.TEST".to_string(),
name: "API Test Corp".to_string(),
industry: Some("API Testing".to_string()),
list_date: Some(chrono::NaiveDate::from_ymd_opt(2025, 1, 1).unwrap()),
additional_info: None,
};
let request = Request::builder()
.method("PUT")
.uri("/api/v1/companies")
.header("content-type", "application/json")
.body(Body::from(serde_json::to_string(&new_company).unwrap()))
.unwrap();
let response = ServiceExt::oneshot(app.clone().into_service(), request).await.unwrap();
assert_eq!(response.status(), StatusCode::OK);
// 2. Act: Get the company
let request_get = Request::builder()
.method("GET")
.uri("/api/v1/companies/API.TEST")
.body(Body::empty())
.unwrap();
let response_get = ServiceExt::oneshot(app.clone().into_service(), request_get).await.unwrap();
assert_eq!(response_get.status(), StatusCode::OK);
// 3. Assert: Check the response body
let body = response_get.into_body().collect().await.unwrap().to_bytes();
let fetched_company: CompanyProfileDto = serde_json::from_slice(&body).unwrap();
assert_eq!(fetched_company.symbol, new_company.symbol);
assert_eq!(fetched_company.name, new_company.name);
}
#[sqlx::test]
async fn test_api_batch_insert_and_get_financials(pool: PgPool) {
let state = AppState::new(pool);
let openapi = app::build_openapi_spec();
let app = app::build_rest_router_with_state(openapi, state).unwrap();
// 1. Act: Batch insert financials
let financials = TimeSeriesFinancialBatchDto {
records: vec![
TimeSeriesFinancialDto {
symbol: "API.FIN".to_string(),
metric_name: "revenue".to_string(),
period_date: chrono::NaiveDate::from_ymd_opt(2023, 12, 31).unwrap(),
value: 2000.0,
source: Some("api_test".to_string()),
},
],
};
let request = Request::builder()
.method("POST")
.uri("/api/v1/market-data/financials/batch")
.header("content-type", "application/json")
.body(Body::from(serde_json::to_string(&financials).unwrap()))
.unwrap();
let response = ServiceExt::oneshot(app.clone().into_service(), request).await.unwrap();
assert_eq!(response.status(), StatusCode::CREATED);
// 2. Act: Get the financials
let request_get = Request::builder()
.method("GET")
.uri("/api/v1/market-data/financials/API.FIN?metrics=revenue")
.body(Body::empty())
.unwrap();
let response_get = ServiceExt::oneshot(app.clone().into_service(), request_get).await.unwrap();
assert_eq!(response_get.status(), StatusCode::OK);
// 3. Assert: Check the response body
let body = response_get.into_body().collect().await.unwrap().to_bytes();
let fetched_financials: Vec<TimeSeriesFinancialDto> = serde_json::from_slice(&body).unwrap();
assert_eq!(fetched_financials.len(), 1);
assert_eq!(fetched_financials[0].symbol, "API.FIN");
assert_eq!(fetched_financials[0].metric_name, "revenue");
assert_eq!(fetched_financials[0].value, 2000.0);
}
#[sqlx::test]
async fn test_api_batch_insert_and_get_daily(pool: PgPool) {
let state = AppState::new(pool);
let openapi = app::build_openapi_spec();
let app = app::build_rest_router_with_state(openapi, state).unwrap();
// 1. Act: Batch insert daily data
let daily_data = DailyMarketDataBatchDto {
records: vec![
DailyMarketDataDto {
symbol: "API.DAILY".to_string(),
trade_date: chrono::NaiveDate::from_ymd_opt(2024, 1, 5).unwrap(),
close_price: Some(250.5),
// ... other fields are None
open_price: None,
high_price: None,
low_price: None,
volume: None,
pe: None,
pb: None,
total_mv: None,
},
],
};
let request = Request::builder()
.method("POST")
.uri("/api/v1/market-data/daily/batch")
.header("content-type", "application/json")
.body(Body::from(serde_json::to_string(&daily_data).unwrap()))
.unwrap();
let response = ServiceExt::oneshot(app.clone().into_service(), request).await.unwrap();
assert_eq!(response.status(), StatusCode::CREATED);
// 2. Act: Get the daily data
let request_get = Request::builder()
.method("GET")
.uri("/api/v1/market-data/daily/API.DAILY")
.body(Body::empty())
.unwrap();
let response_get = ServiceExt::oneshot(app.clone().into_service(), request_get).await.unwrap();
assert_eq!(response_get.status(), StatusCode::OK);
// 3. Assert: Check the response body
let body = response_get.into_body().collect().await.unwrap().to_bytes();
let fetched_data: Vec<DailyMarketDataDto> = serde_json::from_slice(&body).unwrap();
assert_eq!(fetched_data.len(), 1);
assert_eq!(fetched_data[0].symbol, "API.DAILY");
assert_eq!(fetched_data[0].close_price, Some(250.5));
}
#[sqlx::test]
async fn test_api_create_and_get_analysis(pool: PgPool) {
let state = AppState::new(pool);
let openapi = app::build_openapi_spec();
let app = app::build_rest_router_with_state(openapi, state).unwrap();
// 1. Act: Create a new analysis result
let new_analysis = app::dtos::NewAnalysisResultDto {
symbol: "API.AI".to_string(),
module_id: "bull_case".to_string(),
model_name: Some("test-gpt".to_string()),
content: "This is a test analysis from an API test.".to_string(),
meta_data: None,
};
let request = Request::builder()
.method("POST")
.uri("/api/v1/analysis-results")
.header("content-type", "application/json")
.body(Body::from(serde_json::to_string(&new_analysis).unwrap()))
.unwrap();
let response = ServiceExt::oneshot(app.clone().into_service(), request).await.unwrap();
assert_eq!(response.status(), StatusCode::OK); // The create handler returns 200 OK with the created record
let body = response.into_body().collect().await.unwrap().to_bytes();
let created_analysis: app::dtos::AnalysisResultDto = serde_json::from_slice(&body).unwrap();
// 2. Act: Get the analysis by ID
let request_get = Request::builder()
.method("GET")
.uri(format!("/api/v1/analysis-results/{}", created_analysis.id))
.body(Body::empty())
.unwrap();
let response_get = ServiceExt::oneshot(app.clone().into_service(), request_get).await.unwrap();
assert_eq!(response_get.status(), StatusCode::OK);
// 3. Assert: Check the response body
let body_get = response_get.into_body().collect().await.unwrap().to_bytes();
let fetched_analysis: app::dtos::AnalysisResultDto = serde_json::from_slice(&body_get).unwrap();
assert_eq!(fetched_analysis.id, created_analysis.id);
assert_eq!(fetched_analysis.symbol, "API.AI");
// 4. Act: Get by query params
let request_query = Request::builder()
.method("GET")
.uri("/api/v1/analysis-results?symbol=API.AI&module_id=bull_case")
.body(Body::empty())
.unwrap();
let response_query = ServiceExt::oneshot(app.clone().into_service(), request_query).await.unwrap();
assert_eq!(response_query.status(), StatusCode::OK);
let body_query = response_query.into_body().collect().await.unwrap().to_bytes();
let fetched_list: Vec<app::dtos::AnalysisResultDto> = serde_json::from_slice(&body_query).unwrap();
assert_eq!(fetched_list.len(), 1);
assert_eq!(fetched_list[0].id, created_analysis.id);
}
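// Illustrative sketch (not part of the commit): an analogous round-trip test for
// the realtime-quote endpoints named in the commit message. The paths follow the
// commit message; the expected status codes and market value are assumptions.
#[sqlx::test]
async fn test_api_upsert_and_get_realtime_quote_sketch(pool: PgPool) {
    let state = AppState::new(pool);
    let openapi = app::build_openapi_spec();
    let app = app::build_rest_router_with_state(openapi, state).unwrap();
    // 1. Act: Upsert a quote
    let quote = app::dtos::RealtimeQuoteDto {
        symbol: "API.RT".to_string(),
        market: "test".to_string(),
        ts: chrono::Utc::now(),
        price: 10.5,
        open_price: None,
        high_price: None,
        low_price: None,
        prev_close: None,
        change: None,
        change_percent: None,
        volume: None,
        source: Some("api_test".to_string()),
    };
    let request = Request::builder()
        .method("POST")
        .uri("/api/v1/market-data/quotes")
        .header("content-type", "application/json")
        .body(Body::from(serde_json::to_string(&quote).unwrap()))
        .unwrap();
    let response = ServiceExt::oneshot(app.clone().into_service(), request).await.unwrap();
    assert_eq!(response.status(), StatusCode::OK);
    // 2. Act + Assert: Read the latest quote back
    let request_get = Request::builder()
        .method("GET")
        .uri("/api/v1/market-data/quotes/API.RT?market=test")
        .body(Body::empty())
        .unwrap();
    let response_get = ServiceExt::oneshot(app.into_service(), request_get).await.unwrap();
    assert_eq!(response_get.status(), StatusCode::OK);
}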

View File

@ -0,0 +1,192 @@
// This file contains integration tests for the database functions in `src/db.rs`.
// The `#[sqlx::test]` macro gives each test an isolated database, so tests are
// independent and leave no data behind.
// Silence unused_imports: not every import below is used by every test.
#![allow(unused_imports)]
use data_persistence_service::{
db,
dtos::{CompanyProfileDto, TimeSeriesFinancialDto, DailyMarketDataDto, NewAnalysisResultDto},
models,
};
use sqlx::{postgres::PgPoolOptions, PgPool};
use std::time::Duration;
#[allow(dead_code)] // Unused by #[sqlx::test] (which injects its own pool); kept for ad-hoc local runs.
async fn setup() -> PgPool {
dotenvy::dotenv().ok();
let db_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set for tests");
PgPoolOptions::new()
.max_connections(1)
.acquire_timeout(Duration::from_secs(3))
.connect(&db_url)
.await
.expect("Failed to create pool.")
}
// Example test structure
// #[sqlx::test]
// async fn test_some_db_function(pool: PgPool) {
// // 1. Setup: Insert some test data
// // 2. Act: Call the database function
// // 3. Assert: Check the result
// assert!(true);
// }
#[sqlx::test]
async fn test_upsert_and_get_company(pool: PgPool) {
// 1. Setup: Create a test company DTO
let new_company = CompanyProfileDto {
symbol: "TEST.SYM".to_string(),
name: "Test Company Inc.".to_string(),
industry: Some("Testing".to_string()),
list_date: Some(chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap()),
additional_info: Some(serde_json::json!({ "ceo": "John Doe" })),
};
// 2. Act: Call the upsert function
let upsert_result = db::upsert_company(&pool, &new_company).await;
assert!(upsert_result.is_ok());
// 3. Assert: Call the get function and verify the data
let fetched_company = db::get_company_by_symbol(&pool, "TEST.SYM").await.unwrap().unwrap();
assert_eq!(fetched_company.symbol, new_company.symbol);
assert_eq!(fetched_company.name, new_company.name);
assert_eq!(fetched_company.industry, new_company.industry);
assert_eq!(fetched_company.list_date, new_company.list_date);
assert_eq!(fetched_company.additional_info, new_company.additional_info);
// 4. Act (Update): Create a modified DTO and upsert again
let updated_company = CompanyProfileDto {
symbol: "TEST.SYM".to_string(),
name: "Test Company LLC".to_string(), // Name changed
industry: Some("Advanced Testing".to_string()), // Industry changed
list_date: new_company.list_date,
additional_info: new_company.additional_info,
};
let update_result = db::upsert_company(&pool, &updated_company).await;
assert!(update_result.is_ok());
// 5. Assert (Update): Fetch again and verify the updated data
let fetched_updated_company = db::get_company_by_symbol(&pool, "TEST.SYM").await.unwrap().unwrap();
assert_eq!(fetched_updated_company.name, "Test Company LLC");
assert_eq!(fetched_updated_company.industry, Some("Advanced Testing".to_string()));
}
#[sqlx::test]
async fn test_batch_insert_and_get_financials(pool: PgPool) {
// 1. Setup: Create some test financial DTOs
let financials = vec![
TimeSeriesFinancialDto {
symbol: "TEST.FIN".to_string(),
metric_name: "revenue".to_string(),
period_date: chrono::NaiveDate::from_ymd_opt(2023, 12, 31).unwrap(),
value: 1000.0,
source: Some("test".to_string()),
},
TimeSeriesFinancialDto {
symbol: "TEST.FIN".to_string(),
metric_name: "roe".to_string(),
period_date: chrono::NaiveDate::from_ymd_opt(2023, 12, 31).unwrap(),
value: 15.5,
source: Some("test".to_string()),
},
];
// 2. Act: Call the batch insert function
let insert_result = db::batch_insert_financials(&pool, &financials).await;
assert!(insert_result.is_ok());
// 3. Assert: Get all financials and verify
let fetched_all = db::get_financials_by_symbol(&pool, "TEST.FIN", None).await.unwrap();
assert_eq!(fetched_all.len(), 2);
// 4. Assert: Get specific metric and verify
let fetched_roe = db::get_financials_by_symbol(&pool, "TEST.FIN", Some(vec!["roe".to_string()])).await.unwrap();
assert_eq!(fetched_roe.len(), 1);
assert_eq!(fetched_roe[0].metric_name, "roe");
// Note: Comparing decimals requires conversion or a tolerance-based approach
assert_eq!(fetched_roe[0].value.to_string(), "15.5");
}
#[sqlx::test]
async fn test_batch_insert_and_get_daily_data(pool: PgPool) {
// 1. Setup: Create some test daily market data DTOs
let daily_data = vec![
DailyMarketDataDto {
symbol: "TEST.MKT".to_string(),
trade_date: chrono::NaiveDate::from_ymd_opt(2024, 1, 1).unwrap(),
open_price: Some(100.0),
high_price: Some(102.5),
low_price: Some(99.5),
close_price: Some(101.0),
volume: Some(10000),
pe: Some(20.5),
pb: Some(2.1),
total_mv: Some(1000000.0),
},
DailyMarketDataDto {
symbol: "TEST.MKT".to_string(),
trade_date: chrono::NaiveDate::from_ymd_opt(2024, 1, 2).unwrap(),
open_price: Some(101.0),
high_price: Some(103.5),
low_price: Some(100.5),
close_price: Some(102.0),
volume: Some(12000),
pe: Some(20.7),
pb: Some(2.2),
total_mv: Some(1020000.0),
},
];
// 2. Act: Call the batch insert function
let insert_result = db::batch_insert_daily_data(&pool, &daily_data).await;
assert!(insert_result.is_ok());
// 3. Assert: Get all daily data and verify
let fetched_all = db::get_daily_data_by_symbol(&pool, "TEST.MKT", None, None).await.unwrap();
assert_eq!(fetched_all.len(), 2);
assert_eq!(fetched_all[0].trade_date, chrono::NaiveDate::from_ymd_opt(2024, 1, 2).unwrap()); // rows come back in trade_date DESC order
// 4. Assert: Get data within a date range
let start_date = chrono::NaiveDate::from_ymd_opt(2024, 1, 2).unwrap();
let fetched_one = db::get_daily_data_by_symbol(&pool, "TEST.MKT", Some(start_date), None).await.unwrap();
assert_eq!(fetched_one.len(), 1);
assert_eq!(fetched_one[0].trade_date, start_date);
let close_str = fetched_one[0].close_price.unwrap().to_string();
assert!(close_str == "102.0" || close_str == "102");
}
#[sqlx::test]
async fn test_create_and_get_analysis_results(pool: PgPool) {
// 1. Setup: Create a test analysis result DTO
let new_analysis = NewAnalysisResultDto {
symbol: "TEST.AI".to_string(),
module_id: "bull_case".to_string(),
model_name: Some("test-model-v1".to_string()),
content: "This is a bullish analysis.".to_string(),
meta_data: Some(serde_json::json!({ "tokens": 123 })),
};
// 2. Act: Call the create function
let created_result = db::create_analysis_result(&pool, &new_analysis).await.unwrap();
assert_eq!(created_result.symbol, "TEST.AI");
assert_eq!(created_result.module_id, "bull_case");
// 3. Assert: Get by symbol and module_id
let fetched_by_symbol = db::get_analysis_results(&pool, "TEST.AI", Some("bull_case")).await.unwrap();
assert_eq!(fetched_by_symbol.len(), 1);
assert_eq!(fetched_by_symbol[0].id, created_result.id);
assert_eq!(fetched_by_symbol[0].content, new_analysis.content);
// 4. Assert: Get by ID
let fetched_by_id = db::get_analysis_result_by_id(&pool, created_result.id).await.unwrap().unwrap();
assert_eq!(fetched_by_id.symbol, "TEST.AI");
assert_eq!(fetched_by_id.content, new_analysis.content);
// 5. Assert: Get by symbol only
let fetched_all_for_symbol = db::get_analysis_results(&pool, "TEST.AI", None).await.unwrap();
assert_eq!(fetched_all_for_symbol.len(), 1);
}
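// Illustrative sketch (not part of the commit): the read side of the
// realtime-quote DB functions returns None when nothing has been written yet.
#[sqlx::test]
async fn test_get_latest_realtime_quote_empty_sketch(pool: PgPool) {
    let fetched = db::get_latest_realtime_quote(&pool, "test", "NO.SUCH")
        .await
        .unwrap();
    assert!(fetched.is_none());
}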

View File

@ -0,0 +1,30 @@
# Stage 1: Build the application in a build environment
# We use cargo-chef to cache dependencies and speed up future builds
FROM rust:1.78-slim AS chef
WORKDIR /app
RUN cargo install cargo-chef
FROM chef AS planner
COPY . .
# Compute a dependency recipe (recipe.json) used to cache the dependency build
RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
# Build dependencies first, this layer will be cached if dependencies don't change
RUN cargo chef cook --release --recipe-path recipe.json
# Copy application code and build the application
COPY . .
RUN cargo build --release --bin data-persistence-service
# Stage 2: Create the final, minimal production image
FROM debian:bookworm-slim AS runtime
WORKDIR /app
# Copy the compiled binary from the builder stage
COPY --from=builder /app/target/release/data-persistence-service /usr/local/bin/
# Copy migrations for `sqlx-cli` if needed at runtime
COPY ./migrations ./migrations
# Expose the port the application listens on (the service defaults PORT to 3000)
EXPOSE 3000
# Set the entrypoint for the container
ENTRYPOINT ["/usr/local/bin/data-persistence-service"]
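# Example usage (illustrative; the image tag and port mapping are assumptions):
#   docker build -t data-persistence-service .
#   docker run -e DATABASE_URL=postgres://user:pass@host/db \
#              -e PORT=3000 -p 3000:3000 data-persistence-service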