feat(backend): introduce DataManager and multi-provider; analysis orchestration; streaming endpoints; remove legacy tushare_client; enhance logging
feat(frontend): integrate Prisma and reports API/pages
chore(config): add data_sources.yaml; update analysis-config.json
docs: add 2025-11-03 dev log; update user guide
scripts: enhance dev.sh; add tushare_legacy_client
deps: update backend and frontend dependencies
parent: b982cd5368
commit: ff7dc0c95a
164  backend/app/data_manager.py  Normal file
@ -0,0 +1,164 @@
import yaml
import os
import json
from typing import Any, Dict, List, Optional
from app.data_providers.base import BaseDataProvider
from app.data_providers.tushare import TushareProvider
# from app.data_providers.ifind import TonghsProvider
from app.data_providers.yfinance import YfinanceProvider
from app.data_providers.finnhub import FinnhubProvider

import logging

logger = logging.getLogger(__name__)


class DataManager:
    _instance = None

    def __new__(cls, *args, **kwargs):
        if not cls._instance:
            cls._instance = super(DataManager, cls).__new__(cls)
        return cls._instance

    def __init__(self, config_path: str = None):
        if hasattr(self, '_initialized') and self._initialized:
            return

        if config_path is None:
            # Assume the config file is in the 'config' directory at the root of the repo.
            # Find the project root by looking for the config directory.
            current_dir = os.path.dirname(__file__)
            while current_dir != os.path.dirname(current_dir):  # Not at filesystem root
                if os.path.exists(os.path.join(current_dir, "config", "data_sources.yaml")):
                    REPO_ROOT = current_dir
                    break
                current_dir = os.path.dirname(current_dir)
            else:
                # Fallback to the original calculation
                REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))

            config_path = os.path.join(REPO_ROOT, "config", "data_sources.yaml")

        with open(config_path, 'r', encoding='utf-8') as f:
            self.config = yaml.safe_load(f)

        self.providers = {}

        # Build provider base config from environment variables and config/config.json, then initialize providers
        base_cfg: Dict[str, Any] = {"data_sources": {}}

        # 1) Prefer env vars when present
        for name, source_config in (self.config.get('data_sources') or {}).items():
            env_var = source_config.get('api_key_env')
            if env_var:
                api_key = os.getenv(env_var)
                if api_key:
                    base_cfg["data_sources"][name] = {"api_key": api_key}
                else:
                    logger.warning(f"Env var '{env_var}' for provider '{name}' not set; will try config.json.")

        # 2) Fallback to config/config.json if tokens are provided there
        try:
            # Use the same REPO_ROOT calculation as data_sources.yaml
            current_dir = os.path.dirname(__file__)
            while current_dir != os.path.dirname(current_dir):  # Not at filesystem root
                if os.path.exists(os.path.join(current_dir, "config", "data_sources.yaml")):
                    REPO_ROOT = current_dir
                    break
                current_dir = os.path.dirname(current_dir)
            else:
                # Fallback to the original calculation
                REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))

            cfg_json_path = os.path.join(REPO_ROOT, "config", "config.json")
            if os.path.exists(cfg_json_path):
                with open(cfg_json_path, "r", encoding="utf-8") as jf:
                    cfg_json = json.load(jf)
                ds_from_json = (cfg_json.get("data_sources") or {})
                for name, node in ds_from_json.items():
                    if name not in base_cfg["data_sources"] and node.get("api_key"):
                        base_cfg["data_sources"][name] = {"api_key": node.get("api_key")}
                        logger.info(f"Loaded API key for provider '{name}' from config.json")
            else:
                logger.debug("config/config.json not found; skipping JSON token load.")
        except Exception as e:
            logger.warning(f"Failed to read tokens from config/config.json: {e}")
            import traceback
            traceback.print_exc()

        try:
            self._init_providers(base_cfg)
        except Exception as e:
            logger.error(f"Failed to initialize data providers: {e}")

        self._initialized = True

    def _init_providers(self, base_cfg: Dict[str, Any]) -> None:
        """
        Initializes providers with the given base configuration.
        This method should be called after the base config is loaded.
        """
        provider_map = {
            "tushare": TushareProvider,
            # "ifind": TonghsProvider,
            "yfinance": YfinanceProvider,
            "finnhub": FinnhubProvider,
        }

        for name, provider_class in provider_map.items():
            token = None
            source_config = self.config['data_sources'].get(name, {})
            if source_config and source_config.get('api_key_env'):
                token = base_cfg.get("data_sources", {}).get(name, {}).get("api_key")

            # Initialize the provider if a token is found or not required
            if token or not source_config.get('api_key_env'):
                try:
                    self.providers[name] = provider_class(token=token)
                except Exception as e:
                    logger.error(f"Failed to initialize provider '{name}': {e}")
            else:
                logger.warning(f"Provider '{name}' requires token env '{source_config.get('api_key_env')}', but none provided. Skipping.")

    def _detect_market(self, stock_code: str) -> str:
        if stock_code.endswith(('.SH', '.SZ')):
            return 'CN'
        elif stock_code.endswith('.HK'):
            return 'HK'
        elif stock_code.endswith('.T'):  # Assuming .T for Tokyo
            return 'JP'
        else:  # Default to US
            return 'US'

    async def get_data(self, method_name: str, stock_code: str, **kwargs):
        market = self._detect_market(stock_code)
        priority_list = self.config.get('markets', {}).get(market, {}).get('priority', [])

        for provider_name in priority_list:
            provider = self.providers.get(provider_name)
            if not provider:
                logger.warning(f"Provider '{provider_name}' not initialized.")
                continue

            try:
                method = getattr(provider, method_name)
                data = await method(stock_code=stock_code, **kwargs)
                if data is not None and (not isinstance(data, list) or data):
                    logger.info(f"Data successfully fetched from '{provider_name}' for '{stock_code}'.")
                    return data
            except Exception as e:
                logger.warning(f"Provider '{provider_name}' failed for '{stock_code}': {e}. Trying next provider.")

        logger.error(f"All data providers failed for '{stock_code}' on method '{method_name}'.")
        return None

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        return await self.get_data('get_financial_statements', stock_code, report_dates=report_dates)

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        return await self.get_data('get_daily_price', stock_code, start_date=start_date, end_date=end_date)

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        return await self.get_data('get_stock_basic', stock_code)


data_manager = DataManager()
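DataManager reads config/data_sources.yaml for two things: a data_sources map (with an api_key_env entry per provider that needs a key) and a markets map whose priority lists drive the failover order used by get_data. The actual file added in this commit is not reproduced in this diff view, so the following is only a minimal sketch of the structure the code above expects; the provider set and the environment-variable names other than TUSHARE_TOKEN (which the old router code already used) are assumptions.

# Hypothetical sketch of config/data_sources.yaml; structure inferred from DataManager, values are examples
data_sources:
  tushare:
    api_key_env: TUSHARE_TOKEN      # read via os.getenv(); config/config.json is the fallback
  finnhub:
    api_key_env: FINNHUB_API_KEY    # assumed variable name
  yfinance: {}                      # no api_key_env, so the provider is initialized without a token

markets:
  CN:
    priority: [tushare, yfinance]   # tried in this order by DataManager.get_data
  US:
    priority: [finnhub, yfinance]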
71  backend/app/data_providers/base.py  Normal file
@ -0,0 +1,71 @@
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional


class BaseDataProvider(ABC):
    """
    Abstract base class for all financial data providers.
    """

    def __init__(self, token: Optional[str] = None):
        """
        Initializes the data provider, optionally with an API token.

        :param token: API token for the data provider, if required.
        """
        self.token = token
        self._initialize()

    def _initialize(self):
        """
        Perform any necessary initialization, such as API client setup.
        This method is called by the constructor.
        """
        pass

    @abstractmethod
    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        """
        Fetches basic company information for a given stock code.

        :param stock_code: The stock identifier.
        :return: A dictionary with basic company info, or None if not found.
        """
        pass

    @abstractmethod
    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        """
        Fetches daily stock prices for a given period.

        :param stock_code: The stock identifier.
        :param start_date: The start date of the period (e.g., 'YYYYMMDD').
        :param end_date: The end date of the period (e.g., 'YYYYMMDD').
        :return: A list of dictionaries, each representing a day's price data.
        """
        pass

    @abstractmethod
    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        """
        Fetches financial statements for a list of report dates.

        This method should aim to fetch data for all requested dates in a single call if possible
        and then combine them into a unified format.

        :param stock_code: The stock identifier.
        :param report_dates: A list of report dates to fetch data for (e.g., ['20231231', '20221231']).
        :return: A list of dictionaries, each containing financial statement data for a specific period.
        """
        pass

    async def get_financial_statement(self, stock_code: str, report_date: str) -> Optional[Dict[str, Any]]:
        """
        Fetches a single financial statement for a specific report date.
        This is a convenience method that can be implemented by calling get_financial_statements.

        :param stock_code: The stock identifier.
        :param report_date: The report date for the statement (e.g., '20231231').
        :return: A dictionary with financial statement data, or None if not found.
        """
        results = await self.get_financial_statements(stock_code, [report_date])
        return results[0] if results else None
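For reference, adding a new source only requires subclassing BaseDataProvider and implementing the three abstract coroutines; _initialize is the hook for client setup. The snippet below is an illustrative sketch, not a provider shipped in this commit; the class name and canned return values are made up.

from typing import Any, Dict, List, Optional

from app.data_providers.base import BaseDataProvider


class DummyProvider(BaseDataProvider):  # hypothetical example, not part of the commit
    def _initialize(self):
        # A real provider would build its SDK or HTTP client here
        self.calls = 0

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        self.calls += 1
        return {"ts_code": stock_code, "name": "Example Co."}

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        # Real providers return one dict per trading day with trade_date/open/high/low/close/vol keys
        return []

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        return [{"ts_code": stock_code, "end_date": d} for d in report_dates]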
112  backend/app/data_providers/finnhub.py  Normal file
@ -0,0 +1,112 @@
from .base import BaseDataProvider
from typing import Any, Dict, List, Optional
import finnhub
import pandas as pd
from datetime import datetime, timedelta
import asyncio
import logging

logger = logging.getLogger(__name__)


class FinnhubProvider(BaseDataProvider):

    def _initialize(self):
        if not self.token:
            raise ValueError("Finnhub API key not provided.")
        self.client = finnhub.Client(api_key=self.token)

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        def _fetch():  # plain function: run_in_executor needs a sync callable, not a coroutine
            try:
                profile = self.client.company_profile2(symbol=stock_code)
                if not profile:
                    return None

                # Normalize data
                return {
                    "ts_code": stock_code,
                    "name": profile.get("name"),
                    "area": profile.get("country"),
                    "industry": profile.get("finnhubIndustry"),
                    "exchange": profile.get("exchange"),
                    "ipo_date": profile.get("ipo"),
                }
            except Exception as e:
                logger.error(f"Finnhub get_stock_basic failed for {stock_code}: {e}")
                return None

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        def _fetch():  # sync callable for run_in_executor
            try:
                start_ts = int(datetime.strptime(start_date, '%Y%m%d').timestamp())
                end_ts = int(datetime.strptime(end_date, '%Y%m%d').timestamp())

                res = self.client.stock_candles(stock_code, 'D', start_ts, end_ts)
                if res.get('s') != 'ok':
                    return []

                df = pd.DataFrame(res)
                if df.empty:
                    return []

                # Normalize data
                df['trade_date'] = pd.to_datetime(df['t'], unit='s').dt.strftime('%Y%m%d')
                df.rename(columns={
                    'o': 'open', 'h': 'high', 'l': 'low', 'c': 'close', 'v': 'vol'
                }, inplace=True)

                return df[['trade_date', 'open', 'high', 'low', 'close', 'vol']].to_dict('records')

            except Exception as e:
                logger.error(f"Finnhub get_daily_price failed for {stock_code}: {e}")
                return []

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        def _fetch():  # sync callable for run_in_executor
            try:
                # Finnhub provides financials as a whole, not by specific date ranges in one call.
                # We fetch all available data and then filter.
                # Note: 'freq' can be 'annual' or 'quarterly'. We'll use annual.
                res = self.client.financials_reported(symbol=stock_code, freq='annual')
                if not res or not res.get('data'):
                    return []

                df = pd.DataFrame(res['data'])

                # Filter by requested dates
                years_to_fetch = {date[:4] for date in report_dates}
                df = df[df['year'].astype(str).isin(years_to_fetch)]

                # The data is deeply nested in 'report'. We need to extract and pivot it.
                all_reports = []
                for index, row in df.iterrows():
                    report_data = {'ts_code': stock_code, 'end_date': row['endDate']}

                    # Extract concepts from balance sheet, income statement, and cash flow
                    for item in row['report'].get('bs', []):
                        report_data[item['concept']] = item['value']
                    for item in row['report'].get('ic', []):
                        report_data[item['concept']] = item['value']
                    for item in row['report'].get('cf', []):
                        report_data[item['concept']] = item['value']

                    all_reports.append(report_data)

                # Further normalization of keys would be needed here to match a common format,
                # e.g. 'AssetsTotal' -> 'total_assets'.
                # This is a complex task and depends on the desired final schema.

                return all_reports

            except Exception as e:
                logger.error(f"Finnhub get_financial_statements failed for {stock_code}: {e}")
                return []

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)
131  backend/app/data_providers/ifind.py  Normal file
@ -0,0 +1,131 @@
from .base import BaseDataProvider
from typing import Any, Dict, List, Optional
import pandas as pd
from datetime import datetime

# Assumes the iFinDPy library is already installed in the environment.
# Important: users must install iFinDPy manually, following the official documentation.
try:
    from iFinDPy import THS_iFinDLogin, THS_BD, THS_HQ
except ImportError:
    print("Error: the iFinDPy module was not found. Please make sure it is installed per the official Tonghuashun instructions.")
    # Define dummy functions so the program does not crash when iFinDPy is not installed
    def THS_iFinDLogin(*args, **kwargs): return -1
    def THS_BD(*args, **kwargs): return pd.DataFrame()
    def THS_HQ(*args, **kwargs): return pd.DataFrame()


class TonghsProvider(BaseDataProvider):
    _is_logged_in = False

    def __init__(self, token: Optional[str] = None):
        # Log in with the Refresh Token obtained from the iFinD user center
        if not TonghsProvider._is_logged_in:
            if not token:
                raise ValueError("Tonghuashun iFinDPy Refresh Token was not provided in the configuration.")

            # Call the login function, passing the token directly.
            # Note: the exact keyword argument name may need to be confirmed against the iFinDPy docs;
            # here it is assumed to be 'token' or simply the first positional argument.
            login_result = THS_iFinDLogin(token)

            if login_result == 0:
                print("Tonghuashun iFinDPy login succeeded.")
                TonghsProvider._is_logged_in = True
            else:
                print(f"Tonghuashun iFinDPy login failed, error code: {login_result}")
                raise ConnectionError("Unable to log in to the Tonghuashun iFinDPy service; please check that your Refresh Token is correct.")

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        try:
            # TODO: confirm the indicators used to fetch basic company information
            indicators = "ths_stock_short_name_stock;ths_listed_market_stock;ths_industry_stock;ths_ipo_date_stock"
            data = THS_BD(stock_code, indicators, "")

            if data.empty:
                return None

            # --- Data normalization ---
            # iFinDPy usually returns a DataFrame; convert it to a dictionary
            info = data.iloc[0].to_dict()

            return {
                "ts_code": stock_code,
                "name": info.get("ths_stock_short_name_stock"),
                "area": info.get("ths_listed_market_stock"),
                "industry": info.get("ths_industry_stock"),
                "list_date": info.get("ths_ipo_date_stock"),
            }
        except Exception as e:
            print(f"Tonghuashun iFinDPy get_stock_basic failed for stock code {stock_code}: {e}")
            return None

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        try:
            # TODO: confirm the indicators used to fetch daily quotes
            indicators = "open;high;low;close;volume"
            # iFinDPy dates are usually in YYYY-MM-DD format
            date_range = f"{start_date};{end_date}"

            data = THS_HQ(stock_code, indicators, date_range)

            if data.empty:
                return []

            # --- Data normalization ---
            data = data.reset_index()
            data.rename(columns={
                "time": "trade_date",
                "open": "open",
                "high": "high",
                "low": "low",
                "close": "close",
                "volume": "vol"
            }, inplace=True)

            return data.to_dict('records')
        except Exception as e:
            print(f"Tonghuashun iFinDPy get_daily_price failed for stock code {stock_code}: {e}")
            return []

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        try:
            # TODO: confirm the indicators used to fetch financial statements;
            # this may require several THS_BD calls whose results are then merged.

            # Example: fetch data for multiple report periods in a single call.
            # Convert report_dates into the format iFinDPy accepts, e.g. "2022-12-31;2021-12-31"
            dates_param = ";".join(report_dates)

            # Required indicators
            income_indicators = "ths_np_stock"  # net income
            bs_indicators = "ths_total_assets_stock;ths_total_liab_stock"  # total assets; total liabilities
            revenue_indicators = "ths_revenue_stock"  # operating revenue

            # Fetch the data
            income_data = THS_BD(stock_code, income_indicators, f"reportDate={dates_param}")
            bs_data = THS_BD(stock_code, bs_indicators, f"reportDate={dates_param}")
            revenue_data = THS_BD(stock_code, revenue_indicators, f"reportDate={dates_param}")

            # Merge the data
            financials_df = pd.concat([income_data, bs_data, revenue_data], axis=1)
            financials_df = financials_df.loc[:, ~financials_df.columns.duplicated()]
            financials_df = financials_df.reset_index().rename(columns={"index": "end_date"})

            # --- Data normalization ---
            financials_df.rename(columns={
                "ths_revenue_stock": "revenue",
                "ths_np_stock": "net_income",
                "ths_total_assets_stock": "total_assets",
                "ths_total_liab_stock": "total_liabilities",
            }, inplace=True)

            financials_df["ts_code"] = stock_code

            return financials_df.to_dict('records')
        except Exception as e:
            print(f"Tonghuashun iFinDPy get_financial_statements failed for stock code {stock_code}: {e}")
            return []

    async def get_financial_statement(self, stock_code: str, report_date: str) -> Optional[Dict[str, Any]]:
        results = await self.get_financial_statements(stock_code, [report_date])
        return results[0] if results else None
132  backend/app/data_providers/tushare.py  Normal file
@ -0,0 +1,132 @@
from .base import BaseDataProvider
from typing import Any, Dict, List, Optional
import httpx
import logging
import asyncio

logger = logging.getLogger(__name__)

TUSHARE_PRO_URL = "https://api.tushare.pro"


class TushareProvider(BaseDataProvider):

    def _initialize(self):
        if not self.token:
            raise ValueError("Tushare API token not provided.")
        # Use httpx.AsyncClient directly
        self._client = httpx.AsyncClient(timeout=30)

    async def _query(
        self,
        api_name: str,
        params: Optional[Dict[str, Any]] = None,
        fields: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        payload = {
            "api_name": api_name,
            "token": self.token,
            "params": params or {},
        }
        if "limit" not in payload["params"]:
            payload["params"]["limit"] = 5000
        if fields:
            payload["fields"] = fields

        logger.info(f"Querying Tushare API '{api_name}' with params: {params}")

        try:
            resp = await self._client.post(TUSHARE_PRO_URL, json=payload)
            resp.raise_for_status()
            data = resp.json()

            if data.get("code") != 0:
                err_msg = data.get("msg") or "Unknown Tushare error"
                logger.error(f"Tushare API error for '{api_name}': {err_msg}")
                raise RuntimeError(f"{api_name}: {err_msg}")

            fields_def = data.get("data", {}).get("fields", [])
            items = data.get("data", {}).get("items", [])

            rows: List[Dict[str, Any]] = []
            for it in items:
                row = {fields_def[i]: it[i] for i in range(len(fields_def))}
                rows.append(row)

            logger.info(f"Tushare API '{api_name}' returned {len(rows)} rows.")
            return rows

        except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error calling Tushare API '{api_name}': {e.response.status_code} - {e.response.text}")
            raise
        except Exception as e:
            logger.error(f"Exception calling Tushare API '{api_name}': {e}")
            raise

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        try:
            rows = await self._query(
                api_name="stock_basic",
                params={"ts_code": stock_code},
            )
            return rows[0] if rows else None
        except Exception as e:
            logger.error(f"Tushare get_stock_basic failed for {stock_code}: {e}")
            return None

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        try:
            return await self._query(
                api_name="daily",
                params={
                    "ts_code": stock_code,
                    "start_date": start_date,
                    "end_date": end_date,
                },
            )
        except Exception as e:
            logger.error(f"Tushare get_daily_price failed for {stock_code}: {e}")
            return []

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        all_statements: List[Dict[str, Any]] = []
        for date in report_dates:
            logger.info(f"Fetching financial statements for {stock_code}, report date: {date}")
            try:
                bs_rows, ic_rows, cf_rows = await asyncio.gather(
                    self._query(
                        api_name="balancesheet",
                        params={"ts_code": stock_code, "period": date, "report_type": 1},
                    ),
                    self._query(
                        api_name="income",
                        params={"ts_code": stock_code, "period": date, "report_type": 1},
                    ),
                    self._query(
                        api_name="cashflow",
                        params={"ts_code": stock_code, "period": date, "report_type": 1},
                    )
                )

                if not bs_rows and not ic_rows and not cf_rows:
                    logger.warning(f"No financial statements components found from Tushare for {stock_code} on {date}")
                    continue

                merged: Dict[str, Any] = {"ts_code": stock_code, "end_date": date}
                bs_data = bs_rows[0] if bs_rows else {}
                ic_data = ic_rows[0] if ic_rows else {}
                cf_data = cf_rows[0] if cf_rows else {}

                merged.update(bs_data)
                merged.update(ic_data)
                merged.update(cf_data)

                merged["end_date"] = merged.get("end_date") or merged.get("period") or date
                logger.debug(f"Merged statement for {date} has keys: {list(merged.keys())}")

                all_statements.append(merged)
            except Exception as e:
                logger.error(f"Tushare get_financial_statement failed for {stock_code} on {date}: {e}")
                continue

        logger.info(f"Successfully fetched {len(all_statements)} statement(s) for {stock_code}.")
        return all_statements
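For context, the Tushare Pro HTTP API returns column names and row tuples separately, and _query above zips them into per-row dicts. The snippet below is a hand-written illustration of that response shape (ticker and prices are invented), followed by the same conversion the provider performs.

# Illustrative only: shape of a Tushare Pro response as handled by TushareProvider._query
response_json = {
    "code": 0,
    "msg": None,
    "data": {
        "fields": ["ts_code", "trade_date", "open", "close"],
        "items": [["600519.SH", "20240102", 1685.0, 1690.5]],
    },
}

fields_def = response_json["data"]["fields"]
items = response_json["data"]["items"]
rows = [{fields_def[i]: it[i] for i in range(len(fields_def))} for it in items]
# rows == [{"ts_code": "600519.SH", "trade_date": "20240102", "open": 1685.0, "close": 1690.5}]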
114  backend/app/data_providers/yfinance.py  Normal file
@ -0,0 +1,114 @@
from .base import BaseDataProvider
from typing import Any, Dict, List, Optional
import yfinance as yf
import pandas as pd
from datetime import datetime
import asyncio
import logging

logger = logging.getLogger(__name__)


class YfinanceProvider(BaseDataProvider):

    def _map_stock_code(self, stock_code: str) -> str:
        # yfinance uses different tickers for the CN market
        if stock_code.endswith('.SH'):
            return stock_code.replace('.SH', '.SS')
        elif stock_code.endswith('.SZ'):
            # For Shenzhen stocks, try without suffix first, then with .SZ
            base_code = stock_code.replace('.SZ', '')
            return base_code  # Try without suffix first
        return stock_code

    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
        def _fetch():  # plain function: run_in_executor needs a sync callable, not a coroutine
            try:
                ticker = yf.Ticker(self._map_stock_code(stock_code))
                info = ticker.info

                # Normalize data to match expected format
                return {
                    "ts_code": stock_code,
                    "name": info.get("longName"),
                    "area": info.get("country"),
                    "industry": info.get("industry"),
                    "market": info.get("market"),
                    "exchange": info.get("exchange"),
                    "list_date": datetime.fromtimestamp(info.get("firstTradeDateEpoch", 0)).strftime('%Y%m%d') if info.get("firstTradeDateEpoch") else None,
                }
            except Exception as e:
                logger.error(f"yfinance get_stock_basic failed for {stock_code}: {e}")
                return None

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)

    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
        def _fetch():  # sync callable for run_in_executor
            try:
                # yfinance date format is YYYY-MM-DD
                start_fmt = datetime.strptime(start_date, '%Y%m%d').strftime('%Y-%m-%d')
                end_fmt = datetime.strptime(end_date, '%Y%m%d').strftime('%Y-%m-%d')

                ticker = yf.Ticker(self._map_stock_code(stock_code))
                df = ticker.history(start=start_fmt, end=end_fmt)

                df.reset_index(inplace=True)
                # Normalize column names
                df.rename(columns={
                    "Date": "trade_date",
                    "Open": "open", "High": "high", "Low": "low", "Close": "close",
                    "Volume": "vol"
                }, inplace=True)
                df['trade_date'] = df['trade_date'].dt.strftime('%Y%m%d')
                return df.to_dict('records')
            except Exception as e:
                logger.error(f"yfinance get_daily_price failed for {stock_code}: {e}")
                return []

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)

    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
        def _fetch():
            try:
                ticker = yf.Ticker(self._map_stock_code(stock_code))

                # yfinance provides financials quarterly or annually. We'll fetch annually and try to match the dates.
                # Note: this is an approximation, as yfinance does not allow fetching by specific end-of-year dates.
                df_financials = ticker.financials.transpose()
                df_balance = ticker.balance_sheet.transpose()
                df_cashflow = ticker.cash_flow.transpose()

                if df_financials.empty and df_balance.empty and df_cashflow.empty:
                    return []

                # Combine the data
                df_combined = pd.concat([df_financials, df_balance, df_cashflow], axis=1)
                df_combined.index.name = 'end_date'
                df_combined.reset_index(inplace=True)
                df_combined['end_date_str'] = df_combined['end_date'].dt.strftime('%Y%m%d')

                # Filter by requested dates (allowing for some flexibility if an exact match is not found).
                # This simplistic filtering might need to be more robust.
                # For now, we assume the yearly data maps to the year in report_dates.
                years_to_fetch = {date[:4] for date in report_dates}
                df_combined = df_combined[df_combined['end_date'].dt.year.astype(str).isin(years_to_fetch)]

                # Data normalization (yfinance columns differ from Tushare).
                # This is a sample; a more comprehensive mapping would be required.
                df_combined.rename(columns={
                    "Total Revenue": "revenue",
                    "Net Income": "net_income",
                    "Total Assets": "total_assets",
                    "Total Liab": "total_liabilities",
                }, inplace=True, errors='ignore')

                return df_combined.to_dict('records')

            except Exception as e:
                logger.error(f"yfinance get_financial_statements failed for {stock_code}: {e}")
                return []

        loop = asyncio.get_event_loop()
        return await loop.run_in_executor(None, _fetch)
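Putting the providers together, callers go through the data_manager singleton rather than a specific source; get_data picks the market from the ticker suffix and walks that market's priority list until one provider returns non-empty data. A rough usage sketch, assuming the backend package and config/data_sources.yaml are importable and the ticker and dates are arbitrary examples:

import asyncio

from app.data_manager import data_manager


async def main():
    # '.SH' suffix maps to market 'CN'; providers are tried in the order given by
    # markets.CN.priority in config/data_sources.yaml, falling through on errors or empty results
    basic = await data_manager.get_stock_basic("600519.SH")
    prices = await data_manager.get_daily_price("600519.SH", "20240101", "20240331")
    statements = await data_manager.get_financial_statements("600519.SH", ["20231231"])
    print(basic, len(prices or []), len(statements or []))


asyncio.run(main())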
@ -9,12 +9,32 @@ from app.core.config import settings
 from app.routers.config import router as config_router
 from app.routers.financial import router as financial_router
 
-# Configure logging
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s: %(message)s',
-    datefmt='%H:%M:%S'
-)
+# Configure logging to ensure our app logs show up in development
+import sys
+
+# Force our logging configuration to override uvicorn's
+class ForcefulHandler(logging.Handler):
+    def emit(self, record):
+        # Force output to stdout regardless of uvicorn's configuration
+        print(f"[APP] {record.getMessage()}", file=sys.stdout, flush=True)
+
+# Set up our forceful handler for data providers
+forceful_handler = ForcefulHandler()
+forceful_handler.setLevel(logging.DEBUG)
+
+# Configure data providers logger with forceful output
+data_providers_logger = logging.getLogger('app.data_providers')
+data_providers_logger.setLevel(logging.DEBUG)
+data_providers_logger.addHandler(forceful_handler)
+
+# Also set up for the main app logger
+app_logger = logging.getLogger('app')
+app_logger.setLevel(logging.INFO)
+app_logger.addHandler(forceful_handler)
+
+# Ensure our handlers are not suppressed
+data_providers_logger.propagate = False
+app_logger.propagate = False
 
 app = FastAPI(title=settings.APP_NAME, version=settings.APP_VERSION)
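The effect of the new handler can be reproduced in isolation: once it is attached to a parent logger with propagation to the root disabled, any record logged under that hierarchy is printed straight to stdout with an [APP] prefix instead of passing through uvicorn's handlers. A minimal, self-contained sketch (the child logger name is just an example of a module under app.data_providers):

import logging
import sys


class ForcefulHandler(logging.Handler):  # mirrors the handler added in this commit
    def emit(self, record):
        print(f"[APP] {record.getMessage()}", file=sys.stdout, flush=True)


provider_logger = logging.getLogger("app.data_providers")
provider_logger.setLevel(logging.DEBUG)
provider_logger.addHandler(ForcefulHandler())
provider_logger.propagate = False

# A child logger such as 'app.data_providers.tushare' reaches this handler via propagation
logging.getLogger("app.data_providers.tushare").info("Querying Tushare API 'daily'")
# prints: [APP] Querying Tushare API 'daily'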
@ -9,7 +9,6 @@ from typing import Dict, List
 from fastapi import APIRouter, HTTPException, Query
 from fastapi.responses import StreamingResponse
-import os
 
 from app.core.config import settings
 from app.schemas.financial import (
@ -21,10 +20,29 @@ from app.schemas.financial import (
     AnalysisResponse,
     AnalysisConfigResponse
 )
-from app.services.tushare_client import TushareClient
 from app.services.company_profile_client import CompanyProfileClient
 from app.services.analysis_client import AnalysisClient, load_analysis_config, get_analysis_config
 
+# Lazy DataManager loader to avoid import-time failures when optional providers/config are missing
+_dm = None
+def get_dm():
+    global _dm
+    if _dm is not None:
+        return _dm
+    try:
+        from app.data_manager import data_manager as real_dm
+        _dm = real_dm
+        return _dm
+    except Exception:
+        class _StubDM:
+            config = {}
+            async def get_stock_basic(self, stock_code: str):
+                return None
+            async def get_financial_statements(self, stock_code: str, report_dates):
+                return []
+        _dm = _StubDM()
+        return _dm
+
 router = APIRouter()
 
 # Load metric config from file (project root is repo root, not backend/)
@ -45,6 +63,22 @@ def _load_json(path: str) -> Dict:
     return {}
 
 
+@router.get("/data-sources", response_model=Dict[str, List[str]])
+async def get_data_sources():
+    """
+    Get the list of data sources that require an API key from the config.
+    """
+    try:
+        data_sources_config = get_dm().config.get("data_sources", {})
+        sources_requiring_keys = [
+            source for source, config in data_sources_config.items()
+            if config.get("api_key_env")
+        ]
+        return {"sources": sources_requiring_keys}
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Failed to load data sources configuration: {e}")
+
+
 @router.post("/china/{ts_code}/analysis", response_model=List[AnalysisResponse])
 async def generate_full_analysis(
     ts_code: str,
@ -150,13 +184,12 @@ async def generate_full_analysis(
     if not company_name:
         logger.info(f"[API] Fetching company name for {ts_code}")
         try:
-            token = base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key")
-            if token:
-                tushare_client = TushareClient(token=token)
-                basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name")
-                if basic_data:
-                    company_name = basic_data[0].get("name", ts_code)
-                    logger.info(f"[API] Got company name: {company_name}")
+            basic_data = await get_dm().get_stock_basic(stock_code=ts_code)
+            if basic_data:
+                company_name = basic_data.get("name", ts_code)
+                logger.info(f"[API] Got company name: {company_name}")
+            else:
+                company_name = ts_code
         except Exception as e:
             logger.warning(f"Failed to get company name, proceeding with ts_code. Error: {e}")
             company_name = ts_code
@ -228,176 +261,82 @@ async def get_china_financials(
     ts_code: str,
     years: int = Query(5, ge=1, le=15),
 ):
-    # Load Tushare token
-    base_cfg = _load_json(BASE_CONFIG_PATH)
-    token = (
-        os.environ.get("TUSHARE_TOKEN")
-        or settings.TUSHARE_TOKEN
-        or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key")
-    )
-    if not token:
-        raise HTTPException(status_code=500, detail="Tushare API token not configured. Set TUSHARE_TOKEN env or config/config.json data_sources.tushare.api_key")
-
     # Load metric config
     fin_cfg = _load_json(FINANCIAL_CONFIG_PATH)
     api_groups: Dict[str, List[Dict]] = fin_cfg.get("api_groups", {})
 
-    client = TushareClient(token=token)
-
     # Meta tracking
     started_real = datetime.now(timezone.utc)
     started = time.perf_counter_ns()
-    api_calls_total = 0
+    api_calls_total = 0  # This will be harder to track now, maybe DataManager should provide it
     api_calls_by_group: Dict[str, int] = {}
     steps: List[StepRecord] = []
-    current_action = "初始化"
 
-    # Get company name from stock_basic API
-    company_name = None
+    # Get company name
+    company_name = ts_code
     try:
-        basic_data = await client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name")
-        api_calls_total += 1
-        if basic_data and len(basic_data) > 0:
-            company_name = basic_data[0].get("name")
+        basic_data = await get_dm().get_stock_basic(stock_code=ts_code)
+        if basic_data:
+            company_name = basic_data.get("name", ts_code)
     except Exception:
-        # If getting company name fails, continue without it
-        pass
+        pass  # Continue without it
 
     # Collect series per metric key
     series: Dict[str, List[Dict]] = {}
 
-    # Helper to store year-value pairs while keeping most recent per year
-    def _merge_year_value(key: str, year: str, value, month: int = None):
-        arr = series.setdefault(key, [])
-        # upsert by year
-        for item in arr:
-            if item["year"] == year:
-                item["value"] = value
-                if month is not None:
-                    item["month"] = month
-                return
-        arr.append({"year": year, "value": value, "month": month})
-
-    # Query each API group we care about
     errors: Dict[str, str] = {}
-    for group_name, metrics in api_groups.items():
-        step = StepRecord(
-            name=f"拉取 {group_name}",
-            start_ts=started_real.isoformat(),
-            status="running",
-        )
-        steps.append(step)
-        current_action = step.name
-        if not metrics:
-            continue
-
-        # Group metrics by API (handles the 'unknown' group containing several different APIs)
-        api_groups_dict: Dict[str, List[Dict]] = {}
-        for metric in metrics:
-            api = metric.get("api") or group_name
-            if api:  # skip empty API names
-                if api not in api_groups_dict:
-                    api_groups_dict[api] = []
-                api_groups_dict[api].append(metric)
-
-        # Handle each API separately
-        for api_name, api_metrics in api_groups_dict.items():
-            fields = [m.get("tushareParam") for m in api_metrics if m.get("tushareParam")]
-            if not fields:
-                continue
-
-            date_field = "end_date" if group_name in ("fina_indicator", "income", "balancesheet", "cashflow") else "trade_date"
-
-            # Build the API params
-            params = {"ts_code": ts_code, "limit": 5000}
-
-            # For APIs that need a date range (e.g. stk_holdernumber), add date parameters
-            if api_name == "stk_holdernumber":
-                # Date range: from `years` years ago until today
-                end_date = datetime.now().strftime("%Y%m%d")
-                start_date = (datetime.now() - timedelta(days=years * 365)).strftime("%Y%m%d")
-                params["start_date"] = start_date
-                params["end_date"] = end_date
-                # stk_holdernumber usually reports its date in end_date
-                date_field = "end_date"
-
-            # Mark non-time-series APIs (e.g. stock_company) as static data
-            is_static_data = api_name == "stock_company"
-
-            # Build the fields string: the date field plus all requested metric fields.
-            # The date field must be present because it determines the year.
-            fields_list = list(fields)
-            if date_field not in fields_list:
-                fields_list.insert(0, date_field)
-            # APIs like fina_indicator usually also need ts_code and ann_date
-            if api_name in ("fina_indicator", "income", "balancesheet", "cashflow"):
-                for req_field in ["ts_code", "ann_date"]:
-                    if req_field not in fields_list:
-                        fields_list.insert(0, req_field)
-            fields_str = ",".join(fields_list)
-
-            try:
-                data_rows = await client.query(api_name=api_name, params=params, fields=fields_str)
-                api_calls_total += 1
-                api_calls_by_group[group_name] = api_calls_by_group.get(group_name, 0) + 1
-            except Exception as e:
-                # Record the error but keep processing the other APIs
-                error_key = f"{group_name}_{api_name}"
-                errors[error_key] = str(e)
-                continue
-
-            tmp: Dict[str, Dict] = {}
-            current_year = datetime.now().strftime("%Y")
-
-            for row in data_rows:
-                if is_static_data:
-                    # For static data (e.g. stock_company), use the current year and
-                    # only process the first row, since static data usually has a single row
-                    if current_year not in tmp:
-                        year = current_year
-                        month = None
-                        tmp[year] = row
-                        tmp[year]['_month'] = month
-                    # Skip the remaining rows
-                    continue
-                else:
-                    # For time-series data, key rows by the date field
-                    date_val = row.get(date_field)
-                    if not date_val:
-                        continue
-                    year = str(date_val)[:4]
-                    month = int(str(date_val)[4:6]) if len(str(date_val)) >= 6 else None
-                    existing = tmp.get(year)
-                    if existing is None or str(row.get(date_field)) > str(existing.get(date_field)):
-                        tmp[year] = row
-                        tmp[year]['_month'] = month
-
-            for metric in api_metrics:
-                key = metric.get("tushareParam")
-                if not key:
-                    continue
-                for year, row in tmp.items():
-                    month = row.get('_month')
-                    _merge_year_value(key, year, row.get(key), month)
-
-        step.status = "done"
-        step.end_ts = datetime.now(timezone.utc).isoformat()
-        step.duration_ms = int((time.perf_counter_ns() - started) / 1_000_000)
+    # Generate date range for financial statements
+    current_year = datetime.now().year
+    report_dates = [f"{year}1231" for year in range(current_year - years, current_year + 1)]
+
+    # Fetch all financial statements at once
+    step_financials = StepRecord(name="拉取财务报表", start_ts=started_real.isoformat(), status="running")
+    steps.append(step_financials)
+
+    all_financial_data = await get_dm().get_financial_statements(stock_code=ts_code, report_dates=report_dates)
+
+    if all_financial_data:
+        # Process financial data into the 'series' format
+        for report in all_financial_data:
+            year = report.get("end_date", "")[:4]
+            for key, value in report.items():
+                # Skip non-numeric fields like ts_code, end_date, ann_date, etc.
+                if key in ['ts_code', 'end_date', 'ann_date', 'f_ann_date', 'report_type', 'comp_type', 'end_type', 'update_flag']:
+                    continue
+
+                # Only include numeric values
+                if isinstance(value, (int, float)) and value is not None:
+                    if key not in series:
+                        series[key] = []
+
+                    # Avoid duplicates for the same year
+                    if not any(d['year'] == year for d in series[key]):
+                        series[key].append({"year": year, "value": value})
+    else:
+        errors["financial_statements"] = "Failed to fetch from all providers."
+
+    step_financials.status = "done"
+    step_financials.end_ts = datetime.now(timezone.utc).isoformat()
+    step_financials.duration_ms = int((time.perf_counter_ns() - started) / 1_000_000)
+
+    # --- Potentially fetch other data types like daily prices if needed by config ---
+    # This part is simplified. The original code had complex logic for different api_groups.
+    # We will assume for now that the main data comes from financial_statements.
+    # The logic can be extended here to call other data_manager methods based on `fin_cfg`.
 
     finished_real = datetime.now(timezone.utc)
     elapsed_ms = int((time.perf_counter_ns() - started) / 1_000_000)
 
     if not series:
-        # If nothing succeeded, expose partial error info
-        raise HTTPException(status_code=502, detail={"message": "No data returned from Tushare", "errors": errors})
+        raise HTTPException(status_code=502, detail={"message": "No data returned from any data source", "errors": errors})
 
-    # Truncate years and sort
+    # Truncate years and sort (the data should already be mostly correct, but we ensure)
     for key, arr in series.items():
         # Deduplicate and sort desc by year, then cut to requested years, and return asc
         uniq = {item["year"]: item for item in arr}
         arr_sorted_desc = sorted(uniq.values(), key=lambda x: x["year"], reverse=True)
         arr_limited = arr_sorted_desc[:years]
-        arr_sorted = sorted(arr_limited, key=lambda x: x["year"])  # ascending by year
+        arr_sorted = sorted(arr_limited, key=lambda x: x["year"])
         series[key] = arr_sorted
 
     meta = FinancialMeta(
@ -452,21 +391,10 @@ async def get_company_profile(
         logger.info(f"[API] Fetching company name for {ts_code}")
         # Try to get from stock_basic API
         try:
-            base_cfg = _load_json(BASE_CONFIG_PATH)
-            token = (
-                os.environ.get("TUSHARE_TOKEN")
-                or settings.TUSHARE_TOKEN
-                or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key")
-            )
-            if token:
-                from app.services.tushare_client import TushareClient
-                tushare_client = TushareClient(token=token)
-                basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name")
-                if basic_data and len(basic_data) > 0:
-                    company_name = basic_data[0].get("name", ts_code)
-                    logger.info(f"[API] Got company name: {company_name}")
-                else:
-                    company_name = ts_code
+            basic_data = await get_dm().get_stock_basic(stock_code=ts_code)
+            if basic_data:
+                company_name = basic_data.get("name", ts_code)
+                logger.info(f"[API] Got company name: {company_name}")
             else:
                 company_name = ts_code
         except Exception as e:
@ -588,76 +516,25 @@ async def generate_analysis(
     if not company_name:
         logger.info(f"[API] Fetching company name and financial data for {ts_code}")
         try:
-            token = (
-                os.environ.get("TUSHARE_TOKEN")
-                or settings.TUSHARE_TOKEN
-                or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key")
-            )
-            if token:
-                tushare_client = TushareClient(token=token)
-                basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name")
-                if basic_data and len(basic_data) > 0:
-                    company_name = basic_data[0].get("name", ts_code)
-                    logger.info(f"[API] Got company name: {company_name}")
-
-                    # Try to get financial data for context
-                    try:
-                        fin_cfg = _load_json(FINANCIAL_CONFIG_PATH)
-                        api_groups = fin_cfg.get("api_groups", {})
-
-                        # Get financial data summary for context
-                        series: Dict[str, List[Dict]] = {}
-                        for group_name, metrics in api_groups.items():
-                            if not metrics:
-                                continue
-                            api_groups_dict: Dict[str, List[Dict]] = {}
-                            for metric in metrics:
-                                api = metric.get("api") or group_name
-                                if api:
-                                    if api not in api_groups_dict:
-                                        api_groups_dict[api] = []
-                                    api_groups_dict[api].append(metric)
-
-                            for api_name, api_metrics in api_groups_dict.items():
-                                fields = [m.get("tushareParam") for m in api_metrics if m.get("tushareParam")]
-                                if not fields:
-                                    continue
-
-                                date_field = "end_date" if group_name in ("fina_indicator", "income", "balancesheet", "cashflow") else "trade_date"
-
-                                params = {"ts_code": ts_code, "limit": 500}
-                                fields_list = list(fields)
-                                if date_field not in fields_list:
-                                    fields_list.insert(0, date_field)
-                                if api_name in ("fina_indicator", "income", "balancesheet", "cashflow"):
-                                    for req_field in ["ts_code", "ann_date"]:
-                                        if req_field not in fields_list:
-                                            fields_list.insert(0, req_field)
-                                fields_str = ",".join(fields_list)
-
-                                try:
-                                    data_rows = await tushare_client.query(api_name=api_name, params=params, fields=fields_str)
-                                    if data_rows:
-                                        # Get latest year's data
-                                        latest_row = data_rows[0] if data_rows else {}
-                                        for metric in api_metrics:
-                                            key = metric.get("tushareParam")
-                                            if key and key in latest_row:
-                                                if key not in series:
-                                                    series[key] = []
-                                                series[key].append({
-                                                    "year": latest_row.get(date_field, "")[:4] if latest_row.get(date_field) else str(datetime.now().year),
-                                                    "value": latest_row.get(key)
-                                                })
-                                except Exception:
-                                    pass
-
-                        financial_data = {"series": series}
-                    except Exception as e:
-                        logger.warning(f"[API] Failed to get financial data: {e}")
-                        financial_data = None
-                else:
-                    company_name = ts_code
+            basic_data = await get_dm().get_stock_basic(stock_code=ts_code)
+            if basic_data:
+                company_name = basic_data.get("name", ts_code)
+                logger.info(f"[API] Got company name: {company_name}")
+
+                # Try to get financial data for context
+                try:
+                    # A simplified approach to get the latest year's financial data
+                    current_year = datetime.now().year
+                    report_dates = [f"{current_year-1}1231"]  # Get last year's report
+                    latest_financials = await get_dm().get_financial_statements(
+                        stock_code=ts_code,
+                        report_dates=report_dates
+                    )
+                    if latest_financials:
+                        financial_data = {"series": latest_financials[0]}
+                except Exception as e:
+                    logger.warning(f"[API] Failed to get financial data: {e}")
+                    financial_data = None
             else:
                 company_name = ts_code
         except Exception as e:
@ -755,3 +632,130 @@ async def generate_analysis(
         success=result.get("success", False),
         error=result.get("error")
     )
+
+
+@router.get("/china/{ts_code}/analysis/{analysis_type}/stream")
+async def stream_analysis(
+    ts_code: str,
+    analysis_type: str,
+    company_name: str = Query(None, description="Company name for better context"),
+):
+    """
+    Stream analysis content chunks for a given module using OpenAI-compatible streaming.
+    Plain text streaming (text/plain; utf-8). Dependencies are resolved first (non-stream),
+    then the target module content is streamed.
+    """
+    import logging
+    logger = logging.getLogger(__name__)
+
+    logger.info(f"[API] Streaming analysis requested for {ts_code}, type: {analysis_type}")
+
+    # Load config
+    base_cfg = _load_json(BASE_CONFIG_PATH)
+    llm_provider = base_cfg.get("llm", {}).get("provider", "gemini")
+    llm_config = base_cfg.get("llm", {}).get(llm_provider, {})
+
+    api_key = llm_config.get("api_key")
+    base_url = llm_config.get("base_url")
+
+    if not api_key:
+        logger.error(f"[API] API key for {llm_provider} not configured")
+        raise HTTPException(status_code=500, detail=f"API key for {llm_provider} not configured.")
+
+    # Get analysis configuration
+    analysis_cfg = get_analysis_config(analysis_type)
+    if not analysis_cfg:
+        raise HTTPException(status_code=404, detail=f"Analysis type '{analysis_type}' not found in configuration")
+
+    model = analysis_cfg.get("model", "gemini-2.5-flash")
+    prompt_template = analysis_cfg.get("prompt_template", "")
+    if not prompt_template:
+        raise HTTPException(status_code=500, detail=f"Prompt template not found for analysis type '{analysis_type}'")
+
+    # Get company name from ts_code if not provided; we don't need full financials here
+    financial_data = None
+    if not company_name:
+        try:
+            basic_data = await get_dm().get_stock_basic(stock_code=ts_code)
+            if basic_data:
+                company_name = basic_data.get("name", ts_code)
+            else:
+                company_name = ts_code
+        except Exception:
+            company_name = ts_code
+
+    # Resolve dependency context (non-streaming)
+    context = {}
+    try:
+        dependencies = analysis_cfg.get("dependencies", []) or []
+        if dependencies:
+            analysis_config_full = load_analysis_config()
+            modules_config = analysis_config_full.get("analysis_modules", {})
+
+            all_required = set()
+            def collect_all(mod_name: str):
+                for dep in modules_config.get(mod_name, {}).get("dependencies", []) or []:
+                    if dep not in all_required:
+                        all_required.add(dep)
+                        collect_all(dep)
+            for dep in dependencies:
+                all_required.add(dep)
+                collect_all(dep)
+
+            graph = {name: [d for d in (modules_config.get(name, {}).get("dependencies", []) or []) if d in all_required] for name in all_required}
+            in_degree = {u: 0 for u in graph}
+            for u, deps in graph.items():
+                for v in deps:
+                    in_degree[v] += 1
+            queue = [u for u, deg in in_degree.items() if deg == 0]
+            order = []
+            while queue:
+                u = queue.pop(0)
+                order.append(u)
+                for v in graph.get(u, []):
+                    in_degree[v] -= 1
+                    if in_degree[v] == 0:
+                        queue.append(v)
+            if len(order) != len(graph):
+                order = list(all_required)
+
+            completed = {}
+            for mod in order:
+                cfg = modules_config.get(mod, {})
+                dep_ctx = {d: completed.get(d, "") for d in (cfg.get("dependencies", []) or [])}
+                dep_client = AnalysisClient(api_key=api_key, base_url=base_url, model=cfg.get("model", model))
+                dep_result = await dep_client.generate_analysis(
+                    analysis_type=mod,
+                    company_name=company_name,
+                    ts_code=ts_code,
+                    prompt_template=cfg.get("prompt_template", ""),
+                    financial_data=financial_data,
+                    context=dep_ctx,
+                )
+                completed[mod] = dep_result.get("content", "") if dep_result.get("success") else ""
+            context = {dep: completed.get(dep, "") for dep in dependencies}
+    except Exception:
+        context = {}
+
+    client = AnalysisClient(api_key=api_key, base_url=base_url, model=model)
+
+    async def streamer():
+        # Optional header line to help client-side UI
+        header = f"# {analysis_cfg.get('name', analysis_type)}\n\n"
+        yield header
+        async for chunk in client.generate_analysis_stream(
+            analysis_type=analysis_type,
+            company_name=company_name,
+            ts_code=ts_code,
+            prompt_template=prompt_template,
+            financial_data=financial_data,
+            context=context,
+        ):
+            yield chunk
+
+    headers = {
+        # Disable buffering in intermediaries so chunks reach the client as soon as possible
+        "Cache-Control": "no-cache, no-transform",
+        "X-Accel-Buffering": "no",
+    }
+    return StreamingResponse(streamer(), media_type="text/plain; charset=utf-8", headers=headers)
|
|||||||
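A minimal sketch of a client consuming this endpoint. The host, port, and the `/financials` mount prefix are assumptions (adjust them to the actual router prefix and deployment), and the ts_code is only an example:

```python
import asyncio

import httpx  # third-party async HTTP client, used here for streaming reads


async def read_stream() -> None:
    # Assumed URL; the real prefix depends on how the router is mounted.
    url = "http://localhost:8000/financials/china/600519.SH/analysis/company_profile/stream"
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("GET", url) as resp:
            resp.raise_for_status()
            async for chunk in resp.aiter_text():
                # Chunks are plain UTF-8 text, starting with the "# <module name>" header line.
                print(chunk, end="", flush=True)


asyncio.run(read_stream())
```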
@@ -14,7 +14,8 @@ class AnalysisClient:
     def __init__(self, api_key: str, base_url: str, model: str):
         """Initialize OpenAI client with API key, base URL, and model"""
-        self.client = openai.AsyncOpenAI(api_key=api_key, base_url=base_url)
+        # Increase client timeout to allow long-running analysis (5 minutes)
+        self.client = openai.AsyncOpenAI(api_key=api_key, base_url=base_url, timeout=300.0)
         self.model_name = model

     async def generate_analysis(
@@ -56,6 +57,7 @@ class AnalysisClient:
         response = await self.client.chat.completions.create(
             model=self.model_name,
             messages=[{"role": "user", "content": prompt}],
+            timeout=300.0,
         )

         content = response.choices[0].message.content if response.choices else ""
@@ -130,6 +132,51 @@ class AnalysisClient:

        return prompt

    async def generate_analysis_stream(
        self,
        analysis_type: str,
        company_name: str,
        ts_code: str,
        prompt_template: str,
        financial_data: Optional[Dict] = None,
        context: Optional[Dict] = None
    ):
        """Yield analysis content chunks using OpenAI-compatible streaming API.

        Yields plain text chunks as they arrive.
        """
        # Build prompt
        prompt = self._build_prompt(
            prompt_template,
            company_name,
            ts_code,
            financial_data,
            context,
        )

        try:
            stream = await self.client.chat.completions.create(
                model=self.model_name,
                messages=[{"role": "user", "content": prompt}],
                stream=True,
                timeout=300.0,
            )

            # The SDK yields events with incremental deltas
            async for event in stream:
                try:
                    choice = event.choices[0] if getattr(event, "choices", None) else None
                    delta = getattr(choice, "delta", None) if choice is not None else None
                    content = getattr(delta, "content", None) if delta is not None else None
                    if content:
                        yield content
                except Exception:
                    # Best-effort: ignore malformed chunks
                    continue
        except Exception as e:
            # Emit error message to the stream so the client can surface it
            yield f"\n\n[错误] {type(e).__name__}: {str(e)}\n"


def load_analysis_config() -> Dict:
    """Load analysis configuration from JSON file"""
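A sketch of driving `generate_analysis_stream` directly, for example from a small test script. The API key, base URL, ts_code, and prompt template below are placeholders; the real templates come from the analysis configuration file:

```python
import asyncio


async def demo() -> None:
    client = AnalysisClient(
        api_key="sk-...",                        # placeholder
        base_url="https://llm.example.com/v1",   # placeholder OpenAI-compatible endpoint
        model="gemini-2.5-flash",
    )
    async for chunk in client.generate_analysis_stream(
        analysis_type="company_profile",
        company_name="示例公司",
        ts_code="600519.SH",                     # example code
        prompt_template="Analyse {company_name} ({ts_code}).",  # placeholder template
        financial_data=None,
        context={},
    ):
        print(chunk, end="", flush=True)


asyncio.run(demo())
```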
@@ -1,52 +0,0 @@
"""
Minimal async client for Tushare Pro API
"""
from typing import Any, Dict, List, Optional
import httpx

TUSHARE_PRO_URL = "https://api.tushare.pro"


class TushareClient:
    def __init__(self, token: str):
        self.token = token
        self._client = httpx.AsyncClient(timeout=30)

    async def query(
        self,
        api_name: str,
        params: Optional[Dict[str, Any]] = None,
        fields: Optional[str] = None,
    ) -> List[Dict[str, Any]]:
        payload = {
            "api_name": api_name,
            "token": self.token,
            "params": params or {},
        }
        # default larger page size if not provided
        if "limit" not in payload["params"]:
            payload["params"]["limit"] = 5000
        if fields:
            payload["fields"] = fields
        resp = await self._client.post(TUSHARE_PRO_URL, json=payload)
        resp.raise_for_status()
        data = resp.json()
        if data.get("code") != 0:
            err = data.get("msg") or "Tushare error"
            raise RuntimeError(f"{api_name}: {err}")
        fields_def = data.get("data", {}).get("fields", [])
        items = data.get("data", {}).get("items", [])
        rows: List[Dict[str, Any]] = []
        for it in items:
            row = {fields_def[i]: it[i] for i in range(len(fields_def))}
            rows.append(row)
        return rows

    async def aclose(self):
        await self._client.aclose()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.aclose()
@@ -7,3 +7,11 @@ aiosqlite==0.20.0
 alembic==1.13.3
 openai==1.37.0
 asyncpg
+greenlet==3.0.3
+
+# Data Providers
+tushare==1.4.1
+yfinance==0.2.37
+finnhub-python==2.4.20
+pandas==2.2.2
+PyYAML==6.0.1
File diff suppressed because one or more lines are too long

37  config/data_sources.yaml  Normal file
@@ -0,0 +1,37 @@
# Configuration for data sources used by the DataManager

# Defines the available data sources and their specific configurations.
# 'api_key_env' specifies the environment variable that should hold the API key/token.
data_sources:
  tushare:
    api_key_env: TUSHARE_TOKEN
    description: "Primary data source for China market (A-shares)."
  yfinance:
    api_key_env: null # No API key required
    description: "Good for global market data, especially US stocks."
  finnhub:
    api_key_env: FINNHUB_API_KEY
    description: "Another comprehensive source for global stock data."

# Defines the priority of data providers for each market.
# The DataManager will try them in order until data is successfully fetched.
markets:
  CN: # China Market
    priority:
      - tushare
      - yfinance # yfinance can be a fallback
  US: # US Market
    priority:
      - yfinance
      - finnhub
  HK: # Hong Kong Market
    priority:
      - yfinance
      - finnhub
  JP: # Japan Market
    priority:
      - yfinance
  DEFAULT:
    priority:
      - yfinance
      - finnhub
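As a rough illustration of how the `markets` section is meant to be consumed, the helper below resolves the provider priority list for a market and falls back to `DEFAULT` for unknown markets. It is a sketch, not the actual DataManager code:

```python
import yaml


def providers_for_market(config_path: str, market: str) -> list:
    """Return the ordered provider names for a market, falling back to DEFAULT."""
    with open(config_path, "r", encoding="utf-8") as f:
        cfg = yaml.safe_load(f)
    markets = cfg.get("markets", {}) or {}
    entry = markets.get(market) or markets.get("DEFAULT", {}) or {}
    return entry.get("priority", []) or []


print(providers_for_market("config/data_sources.yaml", "CN"))  # ['tushare', 'yfinance']
print(providers_for_market("config/data_sources.yaml", "BR"))  # falls back to DEFAULT
```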
@@ -265,3 +265,6 @@ A:
 **Last updated**: January 2025
+
+
+
2  frontend/.gitignore  vendored
@@ -39,3 +39,5 @@ yarn-error.log*
 # typescript
 *.tsbuildinfo
 next-env.d.ts
+
+/src/generated/prisma
@@ -11,7 +11,7 @@ const nextConfig = {
   },
   // Increase server timeout for long-running AI requests
   experimental: {
-    proxyTimeout: 120000, // 120 seconds
+    proxyTimeout: 300000, // 300 seconds (5 minutes)
   },
   async rewrites() {
     return [
414  frontend/package-lock.json  generated

Lockfile regenerated for the new dependencies. The top-level manifest entries gain "@prisma/client": "^6.18.0" and "geist": "^1.5.1" under dependencies and "prisma": "^6.18.0" under devDependencies. New package entries: @prisma/client 6.18.0, @prisma/config 6.18.0, @prisma/debug 6.18.0, @prisma/engines 6.18.0, @prisma/engines-version 6.18.0-8.34b5a692b7bd79939a9a2c3ef97d816e749cda2f, @prisma/fetch-engine 6.18.0, @prisma/get-platform 6.18.0, prisma 6.18.0, and geist 1.5.1, plus their transitive dependencies: c12 3.1.0, chokidar 4.0.3, citty 0.1.6, confbox 0.2.2, consola 3.4.2, deepmerge-ts 7.1.5, defu 6.1.4, destr 2.0.5, dotenv 16.6.1, effect 3.18.4, empathic 2.0.0, exsolve 1.0.7, fast-check 3.23.2, giget 2.0.0, node-fetch-native 1.6.7, nypm 0.6.2, ohash 2.0.11, pathe 2.0.3, perfect-debounce 1.0.0, pkg-types 2.3.0, pure-rand 6.1.0, rc9 2.1.2, readdirp 4.1.2, and tinyexec 1.0.1. Existing entries for jiti 2.6.1 and typescript 5.9.3 change from "dev": true to "devOptional": true.
frontend/package.json
@@ -3,12 +3,13 @@
   "version": "0.1.0",
   "private": true,
   "scripts": {
-    "dev": "next dev --turbopack",
+    "dev": "next dev -p 3001",
     "build": "next build",
     "start": "next start",
     "lint": "eslint"
   },
   "dependencies": {
+    "@prisma/client": "^6.18.0",
     "@radix-ui/react-checkbox": "^1.3.3",
     "@radix-ui/react-navigation-menu": "^1.2.14",
     "@radix-ui/react-select": "^2.2.6",
@@ -16,6 +17,7 @@
     "@radix-ui/react-tabs": "^1.1.13",
     "class-variance-authority": "^0.7.1",
     "clsx": "^2.1.1",
+    "geist": "^1.5.1",
     "github-markdown-css": "^5.8.1",
     "lucide-react": "^0.545.0",
     "next": "15.5.5",
@@ -36,6 +38,7 @@
     "@types/react-dom": "^19",
     "eslint": "^9",
     "eslint-config-next": "15.5.5",
+    "prisma": "^6.18.0",
     "tailwindcss": "^4",
     "tw-animate-css": "^1.4.0",
     "typescript": "^5"
3  frontend/prisma/migrations/migration_lock.toml  Normal file
@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (e.g., Git)
provider = "postgresql"
19  frontend/prisma/schema.prisma  Normal file
@@ -0,0 +1,19 @@
// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema

generator client {
  provider = "prisma-client-js"
}

datasource db {
  provider          = "postgresql"
  url               = env("DATABASE_URL")
  shadowDatabaseUrl = env("PRISMA_MIGRATE_SHADOW_DATABASE_URL")
}

model Report {
  id        String   @id @default(uuid())
  symbol    String
  content   Json
  createdAt DateTime @default(now())
}
@@ -11,6 +11,14 @@ export async function GET(
   const path = slug.join('/');
   const target = `${BACKEND_BASE}/financials/${path}${url.search}`;
   const resp = await fetch(target, { headers: { 'Content-Type': 'application/json' } });
-  const text = await resp.text();
-  return new Response(text, { status: resp.status, headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' } });
+  // Pass the backend response through as-is (supports streaming bodies)
+  const headers = new Headers();
+  // Copy the key headers to reduce buffering at the proxy layer
+  const contentType = resp.headers.get('content-type') || 'application/json; charset=utf-8';
+  headers.set('content-type', contentType);
+  const cacheControl = resp.headers.get('cache-control');
+  if (cacheControl) headers.set('cache-control', cacheControl);
+  const xAccelBuffering = resp.headers.get('x-accel-buffering');
+  if (xAccelBuffering) headers.set('x-accel-buffering', xAccelBuffering);
+  return new Response(resp.body, { status: resp.status, headers });
 }
29  frontend/src/app/api/reports/[id]/route.ts  Normal file
@@ -0,0 +1,29 @@
import { NextRequest } from 'next/server'
import { prisma } from '../../../../lib/prisma'

export async function GET(
  req: NextRequest,
  context: { params: Promise<{ id: string }> }
) {
  // Prefer the id from the dynamic route params (a Promise); fall back to the last URL path segment
  let id: string | undefined
  try {
    const { id: idFromParams } = await context.params
    id = idFromParams
  } catch {
    // ignore
  }
  if (!id) {
    id = new URL(req.url).pathname.split('/').pop() || undefined
  }

  if (!id) {
    return Response.json({ error: 'missing id' }, { status: 400 })
  }

  const report = await prisma.report.findUnique({ where: { id } })
  if (!report) {
    return Response.json({ error: 'not found' }, { status: 404 })
  }
  return Response.json(report)
}
42  frontend/src/app/api/reports/route.ts  Normal file
@@ -0,0 +1,42 @@
import { NextRequest } from 'next/server'
import { prisma } from '../../../lib/prisma'

export async function GET(req: NextRequest) {
  const url = new URL(req.url)
  const limit = Number(url.searchParams.get('limit') || 50)
  const offset = Number(url.searchParams.get('offset') || 0)

  const [items, total] = await Promise.all([
    prisma.report.findMany({
      orderBy: { createdAt: 'desc' },
      skip: offset,
      take: Math.min(Math.max(limit, 1), 200)
    }),
    prisma.report.count()
  ])

  return Response.json({ items, total })
}

export async function POST(req: NextRequest) {
  try {
    const body = await req.json()
    const symbol = String(body.symbol || '').trim()
    const content = body.content

    if (!symbol) {
      return Response.json({ error: 'symbol is required' }, { status: 400 })
    }
    if (typeof content === 'undefined') {
      return Response.json({ error: 'content is required' }, { status: 400 })
    }

    const created = await prisma.report.create({
      data: { symbol, content }
    })

    return Response.json(created, { status: 201 })
  } catch (e) {
    return Response.json({ error: 'invalid json body' }, { status: 400 })
  }
}
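A sketch of exercising the reports API over plain HTTP, assuming the frontend dev server is running on port 3001 (the `dev` script above binds that port). The payload mirrors the shape the report page saves, but its exact fields here are only illustrative:

```python
import requests  # third-party HTTP client

BASE = "http://localhost:3001/api/reports"

# Create a report (symbol and content are required by the POST handler above).
created = requests.post(
    BASE,
    json={"symbol": "600519.SH", "content": {"analyses": {}, "financials": None}},
    timeout=10,
).json()
print("created id:", created["id"])

# List recent reports with pagination.
listing = requests.get(BASE, params={"limit": 10, "offset": 0}, timeout=10).json()
print("total reports:", listing["total"])

# Fetch one report by id.
detail = requests.get(f"{BASE}/{created['id']}", timeout=10).json()
print("symbol:", detail["symbol"])
```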
16  frontend/src/app/fonts/README.md  Normal file
@@ -0,0 +1,16 @@
Place locally self-hosted fonts in this directory.

Required files (suggested):
- GeistVF.woff2
- GeistMonoVF.woff2

Suggested sources:
- If you already hold a license for the fonts, obtain the WOFF2 variable-font files from the official source or an internal artifact repository.

No extra configuration is needed once the files are in place; `src/app/layout.tsx` already references them via next/font/local:
- ./fonts/GeistVF.woff2 -> --font-geist-sans
- ./fonts/GeistMonoVF.woff2 -> --font-geist-mono

If the font files are not available yet, pages fall back to the system default fonts and functionality is unaffected.
@@ -1,5 +1,6 @@
 import type { Metadata } from "next";
-import { Geist, Geist_Mono } from "next/font/google";
+import { GeistSans } from 'geist/font/sans'
+import { GeistMono } from 'geist/font/mono'
 import "./globals.css";
 import {
   NavigationMenu,
@@ -8,15 +9,9 @@ import {
   NavigationMenuList,
 } from "@/components/ui/navigation-menu";

-const geistSans = Geist({
-  variable: "--font-geist-sans",
-  subsets: ["latin"],
-});
-
-const geistMono = Geist_Mono({
-  variable: "--font-geist-mono",
-  subsets: ["latin"],
-});
+// Official Geist fonts (npm package)
+const geistSans = GeistSans;
+const geistMono = GeistMono;

 export const metadata: Metadata = {
   title: "Fundamental Analysis",
@@ -40,7 +35,7 @@ export default function RootLayout({
             <NavigationMenuLink href="/" className="px-3 py-2">首页</NavigationMenuLink>
           </NavigationMenuItem>
           <NavigationMenuItem>
-            <NavigationMenuLink href="/reports" className="px-3 py-2">报表</NavigationMenuLink>
+            <NavigationMenuLink href="/reports" className="px-3 py-2">历史报告</NavigationMenuLink>
           </NavigationMenuItem>
           <NavigationMenuItem>
             <NavigationMenuLink href="/docs" className="px-3 py-2">文档</NavigationMenuLink>
@@ -44,9 +44,6 @@ export default function ReportPage() {
   // List of analysis types (in order)
   const analysisTypes = useMemo(() => {
     if (!analysisConfig?.analysis_modules) return [];
-    // The order now comes from the backend's topological sort,
-    // but we can define a preferred order for display if needed.
-    // For now, let's just get the keys.
     return Object.keys(analysisConfig.analysis_modules);
   }, [analysisConfig]);
@@ -94,6 +91,49 @@ export default function ReportPage() {
     error?: string;
   }>>([]);

+  const [saving, setSaving] = useState(false)
+  const [saveMsg, setSaveMsg] = useState<string | null>(null)
+
+  const saveReport = async () => {
+    try {
+      setSaving(true)
+      setSaveMsg(null)
+      const content = {
+        market,
+        normalizedSymbol: normalizedTsCode,
+        financialsMeta: financials?.meta || null,
+        // Also save the financial data (used on the report detail page)
+        financials: financials
+          ? {
+              ts_code: financials.ts_code,
+              name: (financials as any).name,
+              series: financials.series,
+              meta: financials.meta,
+            }
+          : null,
+        analyses: Object.fromEntries(
+          Object.entries(analysisStates).map(([k, v]) => [k, { content: v.content, error: v.error, elapsed_ms: v.elapsed_ms, tokens: v.tokens }])
+        )
+      }
+      const resp = await fetch('/api/reports', {
+        method: 'POST',
+        headers: { 'Content-Type': 'application/json' },
+        body: JSON.stringify({ symbol: normalizedTsCode, content })
+      })
+      if (!resp.ok) {
+        const t = await resp.json().catch(() => ({}))
+        throw new Error(t?.error || `HTTP ${resp.status}`)
+      }
+      const data = await resp.json()
+      setSaveMsg('保存成功')
+      return data
+    } catch (e) {
+      setSaveMsg(e instanceof Error ? e.message : '保存失败')
+    } finally {
+      setSaving(false)
+    }
+  }
+
   const runFullAnalysis = async () => {
     if (!isChina || !financials || !analysisConfig?.analysis_modules || isAnalysisRunningRef.current) {
       return;
@@ -131,7 +171,6 @@ export default function ReportPage() {
       fullAnalysisTriggeredRef.current = true;
       runFullAnalysis();
     }
-    // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [financials]);

   // Compute the completion ratio
@@ -157,7 +196,6 @@ export default function ReportPage() {
     const v = typeof ms === 'number' ? ms : 0;
     if (v >= 1000) {
       const s = v / 1000;
-      // Keep two decimal places
       return `${s.toFixed(2)} s`;
     }
     return `${v} ms`;
@@ -185,7 +223,6 @@ export default function ReportPage() {
     return map;
   }, [financialConfig]);

-  // Map tushareParam to group name (used to decide numeric scaling)
   const metricGroupMap = useMemo(() => {
     if (!financialConfig?.api_groups) return {} as Record<string, string>;
     const map: Record<string, string> = {};
@@ -199,19 +236,16 @@ export default function ReportPage() {
     return map;
   }, [financialConfig]);

-  // Number formatting (thousands separators, two decimal places)
   const numberFormatter = useMemo(() => new Intl.NumberFormat('zh-CN', {
     minimumFractionDigits: 2,
     maximumFractionDigits: 2,
   }), []);

-  // Number formatting (thousands separators, no decimals), used for market cap
   const integerFormatter = useMemo(() => new Intl.NumberFormat('zh-CN', {
     minimumFractionDigits: 0,
     maximumFractionDigits: 0,
   }), []);

-  // Normalize Markdown (fixes ordered-list parsing issues caused by AI output)
   const normalizeMarkdown = useMemo(() => {
     return (content: string): string => {
       if (!content) return content;
@@ -220,10 +254,7 @@ export default function ReportPage() {

     for (let i = 0; i < lines.length; i += 1) {
       let line = lines[i];
-      // Replace "1、" / "1 、" and similar prefixes with "1. "
       line = line.replace(/^(\s*)(\d+)[、,]\s*/u, '$1$2. ');

-      // If a line contains only a bare number like "1.", merge it with the next line
       const onlyIndexMatch = line.match(/^\s*(\d+)\.[\s\u3000]*$/u);
       if (onlyIndexMatch) {
         const next = lines[i + 1] ?? '';
@ -231,68 +262,59 @@ export default function ReportPage() {
|
|||||||
i += 1;
|
i += 1;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
out.push(line);
|
out.push(line);
|
||||||
}
|
}
|
||||||
|
|
||||||
let text = out.join('\n');
|
let text = out.join('\n');
|
||||||
// 在以"1."开头的列表前补一个空行,防止被前段落粘连
|
|
||||||
text = text.replace(/([^\n])\n(\s*\d+\.\s)/g, (_m, a, b) => `${a}\n\n${b}`);
|
text = text.replace(/([^\n])\n(\s*\d+\.\s)/g, (_m, a, b) => `${a}\n\n${b}`);
|
||||||
return text;
|
return text;
|
||||||
};
|
};
|
||||||
}, []);
|
}, []);
|
||||||
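The normalizeMarkdown memo above repairs AI-generated ordered lists before rendering: Chinese enumerators such as "1、" become "1. ", a line containing only "1." is merged with the following line, and a blank line is inserted before a list that directly follows a paragraph. A consolidated sketch of the same logic, assembled from the hunks above; the exact merge format of the standalone-number case is not fully visible in the diff and is an assumption:

```tsx
// Illustrative reconstruction of the normalizeMarkdown logic shown above.
const normalizeMarkdown = (content: string): string => {
  if (!content) return content;
  const lines = content.split('\n');
  const out: string[] = [];
  for (let i = 0; i < lines.length; i += 1) {
    let line = lines[i];
    // "1、" / "1," -> "1. "
    line = line.replace(/^(\s*)(\d+)[、,]\s*/u, '$1$2. ');
    // A line holding only "1." is merged with the next line.
    const onlyIndexMatch = line.match(/^\s*(\d+)\.[\s\u3000]*$/u);
    if (onlyIndexMatch) {
      const next = lines[i + 1] ?? '';
      out.push(`${onlyIndexMatch[1]}. ${next.trim()}`); // merge format assumed
      i += 1;
      continue;
    }
    out.push(line);
  }
  let text = out.join('\n');
  // Blank line before an ordered list that follows a paragraph.
  text = text.replace(/([^\n])\n(\s*\d+\.\s)/g, (_m, a, b) => `${a}\n\n${b}`);
  return text;
};
```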
|
|
||||||
// 取消独立公司简介加载;统一纳入顺序分析
|
|
||||||
|
|
||||||
// 检查是否有正在进行的任务
|
|
||||||
const hasRunningTask = useMemo(() => {
|
const hasRunningTask = useMemo(() => {
|
||||||
if (currentAnalysisTask !== null) return true;
|
if (currentAnalysisTask !== null) return true;
|
||||||
if (analysisRecords.some(r => r.status === 'running')) return true;
|
if (analysisRecords.some(r => r.status === 'running')) return true;
|
||||||
return false;
|
return false;
|
||||||
}, [currentAnalysisTask, analysisRecords]);
|
}, [currentAnalysisTask, analysisRecords]);
|
||||||
|
|
||||||
|
// 全部任务是否完成(无运行中任务,且所有分析记录为 done 或 error)
|
||||||
|
const allTasksCompleted = useMemo(() => {
|
||||||
|
if (analysisRecords.length === 0) return false;
|
||||||
|
const allDoneOrErrored = analysisRecords.every(r => r.status === 'done' || r.status === 'error');
|
||||||
|
return allDoneOrErrored && !hasRunningTask && currentAnalysisTask === null;
|
||||||
|
}, [analysisRecords, hasRunningTask, currentAnalysisTask]);
|
||||||
|
|
||||||
|
// 所有任务完成时,停止计时器
|
||||||
|
useEffect(() => {
|
||||||
|
if (allTasksCompleted) {
|
||||||
|
setStartTime(null);
|
||||||
|
}
|
||||||
|
}, [allTasksCompleted]);
|
||||||
|
|
||||||
// 计时器效果
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!startTime) return;
|
if (!startTime) return;
|
||||||
|
|
||||||
const interval = setInterval(() => {
|
const interval = setInterval(() => {
|
||||||
const now = Date.now();
|
const now = Date.now();
|
||||||
const elapsed = Math.floor((now - startTime) / 1000);
|
const elapsed = Math.floor((now - startTime) / 1000);
|
||||||
setElapsedSeconds(elapsed);
|
setElapsedSeconds(elapsed);
|
||||||
}, 1000);
|
}, 1000);
|
||||||
|
|
||||||
return () => clearInterval(interval);
|
return () => clearInterval(interval);
|
||||||
}, [startTime]);
|
}, [startTime]);
|
||||||
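Two pieces above coordinate the timer: allTasksCompleted is derived from the record list (every record done or error, no running task), and when it flips to true the start timestamp is cleared, which stops the interval that updates elapsedSeconds once per second. A compact sketch of that pairing; the state hooks and setters are assumed to be declared earlier in the component:

```tsx
// Sketch of the completion/timer wiring shown above (hook declarations assumed).
const allTasksCompleted = useMemo(() => {
  if (analysisRecords.length === 0) return false;
  const allDoneOrErrored = analysisRecords.every(r => r.status === 'done' || r.status === 'error');
  return allDoneOrErrored && !hasRunningTask && currentAnalysisTask === null;
}, [analysisRecords, hasRunningTask, currentAnalysisTask]);

// Stop the clock once everything has finished.
useEffect(() => {
  if (allTasksCompleted) setStartTime(null);
}, [allTasksCompleted]);

// Tick once per second while a run is active; cleared when startTime becomes null.
useEffect(() => {
  if (!startTime) return;
  const interval = setInterval(() => {
    setElapsedSeconds(Math.floor((Date.now() - startTime) / 1000));
  }, 1000);
  return () => clearInterval(interval);
}, [startTime]);
```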
|
|
||||||
|
|
||||||
|
|
||||||
// 重试单个分析任务
|
|
||||||
const retryAnalysis = async (analysisType: string) => {
|
const retryAnalysis = async (analysisType: string) => {
|
||||||
if (!isChina || !financials || !analysisConfig?.analysis_modules) {
|
if (!isChina || !financials || !analysisConfig?.analysis_modules) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// 允许 company_profile 通过通用通道重试
|
|
||||||
|
|
||||||
// 清除该任务的已完成标记,允许重新执行
|
|
||||||
analysisFetchedRefs.current[analysisType] = false;
|
analysisFetchedRefs.current[analysisType] = false;
|
||||||
|
|
||||||
// 清除错误状态
|
|
||||||
setAnalysisStates(prev => ({
|
setAnalysisStates(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
[analysisType]: { content: '', loading: true, error: null }
|
[analysisType]: { content: '', loading: true, error: null }
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 移除旧的错误记录
|
|
||||||
setAnalysisRecords(prev => prev.filter(record => record.type !== analysisType));
|
setAnalysisRecords(prev => prev.filter(record => record.type !== analysisType));
|
||||||
|
|
||||||
const analysisName =
|
const analysisName =
|
||||||
analysisConfig.analysis_modules[analysisType]?.name || analysisType;
|
analysisConfig.analysis_modules[analysisType]?.name || analysisType;
|
||||||
const startTime = new Date().toISOString();
|
const startTime = new Date().toISOString();
|
||||||
|
|
||||||
// 设置当前任务
|
|
||||||
setCurrentAnalysisTask(analysisType);
|
setCurrentAnalysisTask(analysisType);
|
||||||
|
|
||||||
// 添加执行记录
|
|
||||||
setAnalysisRecords(prev => [...prev, {
|
setAnalysisRecords(prev => [...prev, {
|
||||||
type: analysisType,
|
type: analysisType,
|
||||||
name: analysisName,
|
name: analysisName,
|
||||||
@@ -301,74 +323,59 @@ export default function ReportPage() {
|
|||||||
}]);
|
}]);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
const startedMsLocal = Date.now();
|
||||||
const response = await fetch(
|
const response = await fetch(
|
||||||
`/api/financials/china/${normalizedTsCode}/analysis/${analysisType}?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`
|
`/api/financials/china/${normalizedTsCode}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`HTTP error! status: ${response.status}`);
|
throw new Error(`HTTP error! status: ${response.status}`);
|
||||||
}
|
}
|
||||||
|
const reader = response.body?.getReader();
|
||||||
const data: AnalysisResponse = await response.json();
|
const decoder = new TextDecoder();
|
||||||
const endTime = new Date().toISOString();
|
let aggregate = '';
|
||||||
|
if (reader) {
|
||||||
if (data.success) {
|
while (true) {
|
||||||
// 更新状态
|
const { value, done } = await reader.read();
|
||||||
setAnalysisStates(prev => ({
|
if (done) break;
|
||||||
...prev,
|
const chunk = decoder.decode(value, { stream: true });
|
||||||
[analysisType]: {
|
aggregate += chunk;
|
||||||
content: data.content,
|
const snapshot = aggregate;
|
||||||
loading: false,
|
setAnalysisStates(prev => ({
|
||||||
error: null,
|
...prev,
|
||||||
elapsed_ms: data.elapsed_ms,
|
[analysisType]: {
|
||||||
tokens: data.tokens
|
...prev[analysisType],
|
||||||
}
|
content: snapshot,
|
||||||
}));
|
loading: true,
|
||||||
|
error: null,
|
||||||
// 更新执行记录
|
}
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
}));
|
||||||
record.type === analysisType
|
}
|
||||||
? {
|
|
||||||
...record,
|
|
||||||
status: 'done',
|
|
||||||
end_ts: endTime,
|
|
||||||
duration_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens
|
|
||||||
}
|
|
||||||
: record
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
// 更新状态
|
|
||||||
setAnalysisStates(prev => ({
|
|
||||||
...prev,
|
|
||||||
[analysisType]: {
|
|
||||||
content: '',
|
|
||||||
loading: false,
|
|
||||||
error: data.error || '生成失败',
|
|
||||||
elapsed_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
// 更新执行记录
|
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
|
||||||
record.type === analysisType
|
|
||||||
? {
|
|
||||||
...record,
|
|
||||||
status: 'error',
|
|
||||||
end_ts: endTime,
|
|
||||||
duration_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens,
|
|
||||||
error: data.error || '生成失败'
|
|
||||||
}
|
|
||||||
: record
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
const endTime = new Date().toISOString();
|
||||||
|
const elapsedMs = Date.now() - startedMsLocal;
|
||||||
|
setAnalysisStates(prev => ({
|
||||||
|
...prev,
|
||||||
|
[analysisType]: {
|
||||||
|
...prev[analysisType],
|
||||||
|
content: aggregate,
|
||||||
|
loading: false,
|
||||||
|
error: null,
|
||||||
|
elapsed_ms: elapsedMs,
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
setAnalysisRecords(prev => prev.map(record =>
|
||||||
|
record.type === analysisType
|
||||||
|
? {
|
||||||
|
...record,
|
||||||
|
status: 'done',
|
||||||
|
end_ts: endTime,
|
||||||
|
duration_ms: elapsedMs,
|
||||||
|
}
|
||||||
|
: record
|
||||||
|
));
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
const errorMessage = err instanceof Error ? err.message : '加载失败';
|
const errorMessage = err instanceof Error ? err.message : '加载失败';
|
||||||
const endTime = new Date().toISOString();
|
const endTime = new Date().toISOString();
|
||||||
|
|
||||||
// 更新状态
|
|
||||||
setAnalysisStates(prev => ({
|
setAnalysisStates(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
[analysisType]: {
|
[analysisType]: {
|
||||||
@@ -377,8 +384,6 @@ export default function ReportPage() {
|
|||||||
error: errorMessage
|
error: errorMessage
|
||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 更新执行记录
|
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
setAnalysisRecords(prev => prev.map(record =>
|
||||||
record.type === analysisType
|
record.type === analysisType
|
||||||
? {
|
? {
|
||||||
@@ -390,64 +395,44 @@ export default function ReportPage() {
|
|||||||
: record
|
: record
|
||||||
));
|
));
|
||||||
} finally {
|
} finally {
|
||||||
// 清除当前任务
|
|
||||||
setCurrentAnalysisTask(null);
|
setCurrentAnalysisTask(null);
|
||||||
// 标记为已完成(无论成功还是失败)
|
|
||||||
analysisFetchedRefs.current[analysisType] = true;
|
analysisFetchedRefs.current[analysisType] = true;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
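The retry path above switches from a single JSON response to the new /analysis/{type}/stream endpoint and consumes it incrementally: chunks are decoded with a TextDecoder, appended to an aggregate string, and written into analysisStates on every read so the UI renders partial Markdown while the model is still generating, and the duration is now measured locally instead of taken from the response. A minimal standalone sketch of that consumption pattern; the URL shape is copied from the diff, while the helper name streamAnalysis and the onChunk callback are illustrative. One detail added as a defensive assumption: a final decoder.decode() after the loop flushes any buffered multi-byte character, which the committed hunk does not show.

```tsx
// Minimal sketch: consume the streaming analysis endpoint chunk by chunk.
// The URL matches the diff; `streamAnalysis`/`onChunk` are illustrative names.
async function streamAnalysis(
  tsCode: string,
  analysisType: string,
  companyName: string,
  onChunk: (partial: string) => void,
  signal?: AbortSignal,
): Promise<{ content: string; elapsedMs: number }> {
  const startedMs = Date.now();
  const response = await fetch(
    `/api/financials/china/${tsCode}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(companyName)}`,
    { signal },
  );
  if (!response.ok) {
    throw new Error(`HTTP error! status: ${response.status}`);
  }
  const reader = response.body?.getReader();
  const decoder = new TextDecoder();
  let aggregate = '';
  if (reader) {
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      aggregate += decoder.decode(value, { stream: true });
      onChunk(aggregate);           // push the running snapshot into component state
    }
    aggregate += decoder.decode();  // flush buffered bytes (defensive; not in the diff)
  }
  return { content: aggregate, elapsedMs: Date.now() - startedMs };
}
```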
|
|
||||||
// 顺序执行各个分析
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
// 确保所有必需的数据都已加载
|
|
||||||
if (!isChina || isLoading || error || !financials || !analysisConfig?.analysis_modules || analysisTypes.length === 0) {
|
if (!isChina || isLoading || error || !financials || !analysisConfig?.analysis_modules || analysisTypes.length === 0) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 如果已经有分析任务正在运行,则跳过
|
|
||||||
if (isAnalysisRunningRef.current) {
|
if (isAnalysisRunningRef.current) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const runAnalysesSequentially = async () => {
|
const runAnalysesSequentially = async () => {
|
||||||
// 设置运行标志,防止并发执行
|
|
||||||
if (isAnalysisRunningRef.current) {
|
if (isAnalysisRunningRef.current) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
isAnalysisRunningRef.current = true;
|
isAnalysisRunningRef.current = true;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (!startTime) {
|
if (!stopRequestedRef.current && !startTime) {
|
||||||
setStartTime(Date.now());
|
setStartTime(Date.now());
|
||||||
}
|
}
|
||||||
for (let i = 0; i < analysisTypes.length; i++) {
|
for (let i = 0; i < analysisTypes.length; i++) {
|
||||||
const analysisType = analysisTypes[i];
|
const analysisType = analysisTypes[i];
|
||||||
|
|
||||||
if (stopRequestedRef.current) {
|
if (stopRequestedRef.current) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (analysisFetchedRefs.current[analysisType]) {
|
if (analysisFetchedRefs.current[analysisType]) {
|
||||||
continue; // 已加载过,跳过
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Ensure refs and config are defined before proceeding
|
|
||||||
if (!analysisFetchedRefs.current || !analysisConfig?.analysis_modules) {
|
if (!analysisFetchedRefs.current || !analysisConfig?.analysis_modules) {
|
||||||
console.error("分析配置或refs未初始化,无法进行分析。");
|
console.error("分析配置或refs未初始化,无法进行分析。");
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
// 记录当前类型
|
|
||||||
currentAnalysisTypeRef.current = analysisType;
|
currentAnalysisTypeRef.current = analysisType;
|
||||||
const analysisName =
|
const analysisName =
|
||||||
analysisConfig.analysis_modules[analysisType]?.name || analysisType;
|
analysisConfig.analysis_modules[analysisType]?.name || analysisType;
|
||||||
const startTime = new Date().toISOString();
|
const startTime = new Date().toISOString();
|
||||||
|
|
||||||
// 设置当前任务
|
|
||||||
setCurrentAnalysisTask(analysisType);
|
setCurrentAnalysisTask(analysisType);
|
||||||
|
|
||||||
|
|
||||||
// 设置/更新执行记录为 running(避免重复项)
|
|
||||||
setAnalysisRecords(prev => {
|
setAnalysisRecords(prev => {
|
||||||
const next = [...prev];
|
const next = [...prev];
|
||||||
const idx = next.findIndex(r => r.type === analysisType);
|
const idx = next.findIndex(r => r.type === analysisType);
|
||||||
@@ -464,82 +449,66 @@ export default function ReportPage() {
|
|||||||
}
|
}
|
||||||
return next;
|
return next;
|
||||||
});
|
});
|
||||||
|
|
||||||
// 设置加载状态
|
|
||||||
setAnalysisStates(prev => ({
|
setAnalysisStates(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
[analysisType]: { content: '', loading: true, error: null }
|
[analysisType]: { content: '', loading: true, error: null }
|
||||||
}));
|
}));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
abortControllerRef.current?.abort();
|
abortControllerRef.current?.abort();
|
||||||
abortControllerRef.current = new AbortController();
|
abortControllerRef.current = new AbortController();
|
||||||
|
const startedMsLocal = Date.now();
|
||||||
const response = await fetch(
|
const response = await fetch(
|
||||||
`/api/financials/china/${normalizedTsCode}/analysis/${analysisType}?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`,
|
`/api/financials/china/${normalizedTsCode}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`,
|
||||||
{ signal: abortControllerRef.current.signal }
|
{ signal: abortControllerRef.current.signal }
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`HTTP error! status: ${response.status}`);
|
throw new Error(`HTTP error! status: ${response.status}`);
|
||||||
}
|
}
|
||||||
|
const reader = response.body?.getReader();
|
||||||
const data: AnalysisResponse = await response.json();
|
const decoder = new TextDecoder();
|
||||||
const endTime = new Date().toISOString();
|
let aggregate = '';
|
||||||
|
if (reader) {
|
||||||
if (data.success) {
|
// 持续读取并追加到内容
|
||||||
// 更新状态
|
while (true) {
|
||||||
setAnalysisStates(prev => ({
|
const { value, done } = await reader.read();
|
||||||
...prev,
|
if (done) break;
|
||||||
[analysisType]: {
|
const chunk = decoder.decode(value, { stream: true });
|
||||||
content: data.content,
|
aggregate += chunk;
|
||||||
loading: false,
|
const snapshot = aggregate;
|
||||||
error: null,
|
setAnalysisStates(prev => ({
|
||||||
elapsed_ms: data.elapsed_ms,
|
...prev,
|
||||||
tokens: data.tokens
|
[analysisType]: {
|
||||||
}
|
...prev[analysisType],
|
||||||
}));
|
content: snapshot,
|
||||||
|
loading: true,
|
||||||
// 更新执行记录
|
error: null,
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
}
|
||||||
record.type === analysisType
|
}));
|
||||||
? {
|
}
|
||||||
...record,
|
|
||||||
status: 'done',
|
|
||||||
end_ts: endTime,
|
|
||||||
duration_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens
|
|
||||||
}
|
|
||||||
: record
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
// 更新状态
|
|
||||||
setAnalysisStates(prev => ({
|
|
||||||
...prev,
|
|
||||||
[analysisType]: {
|
|
||||||
content: '',
|
|
||||||
loading: false,
|
|
||||||
error: data.error || '生成失败',
|
|
||||||
elapsed_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
|
|
||||||
// 更新执行记录
|
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
|
||||||
record.type === analysisType
|
|
||||||
? {
|
|
||||||
...record,
|
|
||||||
status: 'error',
|
|
||||||
end_ts: endTime,
|
|
||||||
duration_ms: data.elapsed_ms,
|
|
||||||
tokens: data.tokens,
|
|
||||||
error: data.error || '生成失败'
|
|
||||||
}
|
|
||||||
: record
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
const endTime = new Date().toISOString();
|
||||||
|
const elapsedMs = Date.now() - startedMsLocal;
|
||||||
|
setAnalysisStates(prev => ({
|
||||||
|
...prev,
|
||||||
|
[analysisType]: {
|
||||||
|
...prev[analysisType],
|
||||||
|
content: aggregate,
|
||||||
|
loading: false,
|
||||||
|
error: null,
|
||||||
|
elapsed_ms: elapsedMs,
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
setAnalysisRecords(prev => prev.map(record =>
|
||||||
|
record.type === analysisType
|
||||||
|
? {
|
||||||
|
...record,
|
||||||
|
status: 'done',
|
||||||
|
end_ts: endTime,
|
||||||
|
duration_ms: elapsedMs,
|
||||||
|
}
|
||||||
|
: record
|
||||||
|
));
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
// 若为主动中止,则把当前任务恢复为待处理并退出循环
|
|
||||||
if (err && typeof err === 'object' && (err as any).name === 'AbortError') {
|
if (err && typeof err === 'object' && (err as any).name === 'AbortError') {
|
||||||
setAnalysisStates(prev => ({
|
setAnalysisStates(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
@@ -555,8 +524,6 @@ export default function ReportPage() {
|
|||||||
}
|
}
|
||||||
const errorMessage = err instanceof Error ? err.message : '加载失败';
|
const errorMessage = err instanceof Error ? err.message : '加载失败';
|
||||||
const endTime = new Date().toISOString();
|
const endTime = new Date().toISOString();
|
||||||
|
|
||||||
// 更新状态
|
|
||||||
setAnalysisStates(prev => ({
|
setAnalysisStates(prev => ({
|
||||||
...prev,
|
...prev,
|
||||||
[analysisType]: {
|
[analysisType]: {
|
||||||
@@ -565,8 +532,6 @@ export default function ReportPage() {
|
|||||||
error: errorMessage
|
error: errorMessage
|
||||||
}
|
}
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// 更新执行记录
|
|
||||||
setAnalysisRecords(prev => prev.map(record =>
|
setAnalysisRecords(prev => prev.map(record =>
|
||||||
record.type === analysisType
|
record.type === analysisType
|
||||||
? {
|
? {
|
||||||
@@ -578,20 +543,16 @@ export default function ReportPage() {
|
|||||||
: record
|
: record
|
||||||
));
|
));
|
||||||
} finally {
|
} finally {
|
||||||
// 清除当前任务
|
|
||||||
setCurrentAnalysisTask(null);
|
setCurrentAnalysisTask(null);
|
||||||
currentAnalysisTypeRef.current = null;
|
currentAnalysisTypeRef.current = null;
|
||||||
analysisFetchedRefs.current[analysisType] = true;
|
analysisFetchedRefs.current[analysisType] = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} finally {
|
} finally {
|
||||||
// 清除运行标志
|
|
||||||
isAnalysisRunningRef.current = false;
|
isAnalysisRunningRef.current = false;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
runAnalysesSequentially();
|
runAnalysesSequentially();
|
||||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
|
||||||
}, [isChina, isLoading, error, financials, analysisConfig, analysisTypes, normalizedTsCode, manualRunKey]);
|
}, [isChina, isLoading, error, financials, analysisConfig, analysisTypes, normalizedTsCode, manualRunKey]);
|
||||||
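The effect above walks analysisTypes in order, skips entries already marked in analysisFetchedRefs, honours stopRequestedRef, and aborts the in-flight request through a per-run AbortController when the user stops. A stripped-down control-flow sketch of that loop, written as it would sit inside the component with the refs and setters declared earlier; streamAnalysis refers to the illustrative helper sketched above, and the per-task record/state bookkeeping is omitted:

```tsx
// Control-flow sketch of runAnalysesSequentially (record/state updates omitted).
const runAnalysesSequentially = async () => {
  if (isAnalysisRunningRef.current) return;        // guard against concurrent runs
  isAnalysisRunningRef.current = true;
  try {
    if (!stopRequestedRef.current && !startTime) setStartTime(Date.now());
    for (const analysisType of analysisTypes) {
      if (stopRequestedRef.current) break;                      // user pressed stop
      if (analysisFetchedRefs.current[analysisType]) continue;  // already completed
      currentAnalysisTypeRef.current = analysisType;
      setCurrentAnalysisTask(analysisType);
      abortControllerRef.current?.abort();
      abortControllerRef.current = new AbortController();
      try {
        const result = await streamAnalysis(
          normalizedTsCode,
          analysisType,
          financials?.name || normalizedTsCode,
          () => { /* the real effect writes the partial content into analysisStates here */ },
          abortControllerRef.current.signal,
        );
        void result; // the real effect marks the record 'done' with result.elapsedMs
      } catch (err) {
        if ((err as { name?: string })?.name === 'AbortError') break; // stopAll resets this task
        // other errors are written into the task record; the loop moves on
      } finally {
        setCurrentAnalysisTask(null);
        currentAnalysisTypeRef.current = null;
        analysisFetchedRefs.current[analysisType] = true;
      }
    }
  } finally {
    isAnalysisRunningRef.current = false;
  }
};
```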
|
|
||||||
const stopAll = () => {
|
const stopAll = () => {
|
||||||
@@ -603,14 +564,12 @@ export default function ReportPage() {
|
|||||||
analysisFetchedRefs.current[currentAnalysisTypeRef.current] = false;
|
analysisFetchedRefs.current[currentAnalysisTypeRef.current] = false;
|
||||||
}
|
}
|
||||||
setCurrentAnalysisTask(null);
|
setCurrentAnalysisTask(null);
|
||||||
// 暂停计时器
|
|
||||||
setStartTime(null);
|
setStartTime(null);
|
||||||
};
|
};
|
||||||
|
|
||||||
const continuePending = () => {
|
const continuePending = () => {
|
||||||
if (isAnalysisRunningRef.current) return;
|
if (isAnalysisRunningRef.current) return;
|
||||||
stopRequestedRef.current = false;
|
stopRequestedRef.current = false;
|
||||||
// 恢复计时器:保持累计秒数继续计时
|
|
||||||
setStartTime((prev) => (prev == null ? Date.now() - elapsedSeconds * 1000 : prev));
|
setStartTime((prev) => (prev == null ? Date.now() - elapsedSeconds * 1000 : prev));
|
||||||
setManualRunKey((k) => k + 1);
|
setManualRunKey((k) => k + 1);
|
||||||
};
|
};
|
||||||
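stopAll aborts the in-flight request, clears the current task, resets the fetched flag of the interrupted type so it can be rerun, and pauses the clock by nulling startTime; continuePending resumes by back-dating startTime with the already accumulated seconds, so the displayed elapsed time continues from where it stopped instead of restarting at zero. A worked example of the resume arithmetic:

```tsx
// Resume arithmetic used by continuePending: keep the accumulated seconds.
// Suppose 125 s had elapsed before stop was pressed.
const elapsedSeconds = 125;
const resumedStartTime = Date.now() - elapsedSeconds * 1000;
// The ticking effect computes Math.floor((Date.now() - resumedStartTime) / 1000),
// which is ~125 immediately after resuming and keeps increasing from there.
```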
@@ -618,7 +577,6 @@ export default function ReportPage() {
|
|||||||
return (
|
return (
|
||||||
<div className="space-y-4">
|
<div className="space-y-4">
|
||||||
<div className="flex items-stretch justify-between gap-4">
|
<div className="flex items-stretch justify-between gap-4">
|
||||||
{/* 左侧:报告信息卡片 */}
|
|
||||||
<Card className="flex-1">
|
<Card className="flex-1">
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle className="text-xl">报告页面</CardTitle>
|
<CardTitle className="text-xl">报告页面</CardTitle>
|
||||||
@@ -647,10 +605,9 @@ export default function ReportPage() {
|
|||||||
)}
|
)}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
||||||
{/* 中间:操作卡片 */}
|
|
||||||
{isChina && (
|
{isChina && (
|
||||||
<Card className="w-40 flex-shrink-0">
|
<Card className="w-40 flex-shrink-0">
|
||||||
<CardContent className="flex flex-col gap-2">
|
<CardContent className="flex flex-col gap-2">
|
||||||
@@ -666,8 +623,6 @@ export default function ReportPage() {
|
|||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 右侧:任务状态 */}
|
|
||||||
{isChina && (
|
{isChina && (
|
||||||
<Card className="w-80">
|
<Card className="w-80">
|
||||||
<CardHeader className="flex flex-col space-y-2 pb-2">
|
<CardHeader className="flex flex-col space-y-2 pb-2">
|
||||||
@@ -685,10 +640,16 @@ export default function ReportPage() {
|
|||||||
style={{ width: `${completionProgress}%` }}
|
style={{ width: `${completionProgress}%` }}
|
||||||
/>
|
/>
|
||||||
</div>
|
</div>
|
||||||
{/* 操作按钮已移至左侧信息卡片 */}
|
{allTasksCompleted && (
|
||||||
|
<div className="pt-2">
|
||||||
|
<Button onClick={saveReport} disabled={saving} variant="outline">
|
||||||
|
{saving ? '保存中...' : '保存报告'}
|
||||||
|
</Button>
|
||||||
|
{saveMsg && <span className="ml-2 text-xs text-muted-foreground">{saveMsg}</span>}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent className="space-y-2">
|
<CardContent className="space-y-2">
|
||||||
{/* 当前正在进行的任务 */}
|
|
||||||
{currentAnalysisTask && analysisConfig && (
|
{currentAnalysisTask && analysisConfig && (
|
||||||
(() => {
|
(() => {
|
||||||
const analysisName = analysisConfig.analysis_modules[currentAnalysisTask]?.name || currentAnalysisTask;
|
const analysisName = analysisConfig.analysis_modules[currentAnalysisTask]?.name || currentAnalysisTask;
|
||||||
@@ -704,44 +665,6 @@ export default function ReportPage() {
|
|||||||
);
|
);
|
||||||
})()
|
})()
|
||||||
)}
|
)}
|
||||||
|
|
||||||
{/* 最近一个已完成的任务 */}
|
|
||||||
{(() => {
|
|
||||||
// 找到最近一个已完成的任务(按结束时间排序)
|
|
||||||
const completedRecords = analysisRecords
|
|
||||||
.filter(r => r.status === 'done' && r.end_ts)
|
|
||||||
.sort((a, b) => {
|
|
||||||
if (!a.end_ts || !b.end_ts) return 0;
|
|
||||||
return new Date(b.end_ts).getTime() - new Date(a.end_ts).getTime();
|
|
||||||
});
|
|
||||||
|
|
||||||
if (completedRecords.length > 0) {
|
|
||||||
const latestRecord = completedRecords[0];
|
|
||||||
return (
|
|
||||||
<div className="flex items-center gap-2 text-sm">
|
|
||||||
<CheckCircle className="size-4 text-green-600" />
|
|
||||||
<div>
|
|
||||||
<div className="font-medium">{latestRecord.name}</div>
|
|
||||||
<div className="text-xs text-muted-foreground">已完成</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (financials && !isLoading && !error) {
|
|
||||||
return (
|
|
||||||
<div className="flex items-center gap-2 text-sm">
|
|
||||||
<CheckCircle className="size-4 text-green-600" />
|
|
||||||
<div>
|
|
||||||
<div className="font-medium">财务数据获取</div>
|
|
||||||
<div className="text-xs text-muted-foreground">已完成</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
})()}
|
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
)}
|
)}
|
||||||
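When allTasksCompleted becomes true, the task-status card now shows a 保存报告 button wired to saveReport, saving, and saveMsg. The handler itself is not part of this hunk; the sketch below only illustrates the kind of payload the report pages added below read back (symbol plus a content object holding financials and an analyses map). The endpoint, HTTP method, and field names are assumptions inferred from those pages, not confirmed by this commit:

```tsx
// Hypothetical sketch of saveReport; endpoint and payload shape are assumptions.
const saveReport = async () => {
  setSaving(true);
  setSaveMsg(null);
  try {
    const resp = await fetch('/api/reports', {                 // assumed endpoint
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        symbol: normalizedTsCode,
        content: { financials, analyses: analysisStates },      // assumed shape
      }),
    });
    setSaveMsg(resp.ok ? '已保存' : '保存失败');
  } catch {
    setSaveMsg('保存失败');
  } finally {
    setSaving(false);
  }
};
```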
767
frontend/src/app/reports/[id]/page.tsx
Normal file
@@ -0,0 +1,767 @@
|
|||||||
|
import { prisma } from '../../../lib/prisma'
|
||||||
|
import ReactMarkdown from 'react-markdown'
|
||||||
|
import remarkGfm from 'remark-gfm'
|
||||||
|
import { Tabs, TabsList, TabsTrigger, TabsContent } from '@/components/ui/tabs'
|
||||||
|
import { Card, CardHeader, CardTitle, CardContent } from '@/components/ui/card'
|
||||||
|
import { Table, TableHeader, TableBody, TableHead, TableRow, TableCell } from '@/components/ui/table'
|
||||||
|
|
||||||
|
type Report = {
|
||||||
|
id: string
|
||||||
|
symbol: string
|
||||||
|
content: any
|
||||||
|
createdAt: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export default async function ReportDetailPage({ params }: { params: Promise<{ id: string }> }) {
|
||||||
|
const { id } = await params
|
||||||
|
const data = await prisma.report.findUnique({ where: { id } })
|
||||||
|
|
||||||
|
if (!data) {
|
||||||
|
return <div className="text-sm text-red-600">未找到报告</div>
|
||||||
|
}
|
||||||
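The detail page is a server component: params arrives as a Promise (the newer App Router convention) and is awaited before the Prisma lookup, and a missing record short-circuits to an inline error message. A condensed sketch of that data-access pattern, with the tab rendering reduced to a placeholder:

```tsx
// Condensed sketch of the server-component lookup used by the detail page.
import { prisma } from '../../../lib/prisma'

export default async function ReportDetailPage({ params }: { params: Promise<{ id: string }> }) {
  const { id } = await params                              // params is a Promise here
  const data = await prisma.report.findUnique({ where: { id } })
  if (!data) {
    return <div className="text-sm text-red-600">未找到报告</div>
  }
  const content = (data.content ?? {}) as Record<string, unknown>
  // ...the real page renders tabs from content.financials and content.analyses
  return <pre>{JSON.stringify(content, null, 2)}</pre>      // placeholder body for this sketch
}
```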
|
|
||||||
|
const content = (data.content ?? {}) as any
|
||||||
|
const analyses = (content?.analyses ?? {}) as Record<string, any>
|
||||||
|
|
||||||
|
// 规范化显示顺序(与生成报告时一致的中文 Tabs 次序)
|
||||||
|
const ordered = [
|
||||||
|
{ id: 'financial', label: '财务数据' },
|
||||||
|
{ id: 'company_profile', label: '公司简介' },
|
||||||
|
{ id: 'fundamentals', label: '基本面分析' },
|
||||||
|
{ id: 'bullish', label: '看涨分析' },
|
||||||
|
{ id: 'bearish', label: '看跌分析' },
|
||||||
|
{ id: 'market', label: '市场分析' },
|
||||||
|
{ id: 'news', label: '新闻分析' },
|
||||||
|
{ id: 'trading', label: '交易分析' },
|
||||||
|
{ id: 'insiders_institutions', label: '内部人及机构动向分析' },
|
||||||
|
{ id: 'final_conclusion', label: '最终结论' },
|
||||||
|
{ id: 'meta', label: '元数据' },
|
||||||
|
] as const
|
||||||
|
|
||||||
|
// 每个规范化 id 对应的候选后端 key(兼容不同命名)
|
||||||
|
const candidateKeys: Record<string, string[]> = {
|
||||||
|
company_profile: ['company_profile'],
|
||||||
|
fundamentals: ['fundamental_analysis', 'fundamentals_analysis', 'basic_analysis', 'basics_analysis'],
|
||||||
|
bullish: ['bullish_analysis', 'bullish_case', 'bull_case'],
|
||||||
|
bearish: ['bearish_analysis', 'bearish_case', 'bear_case'],
|
||||||
|
market: ['market_analysis'],
|
||||||
|
news: ['news_analysis'],
|
||||||
|
trading: ['trading_analysis'],
|
||||||
|
insiders_institutions: ['insider_institutional', 'insiders_institutions_analysis', 'insider_institution_analysis', 'insider_analysis'],
|
||||||
|
final_conclusion: ['final_conclusion', 'conclusion', 'investment_thesis'],
|
||||||
|
}
|
||||||
|
|
||||||
|
const findKey = (id: string): string | null => {
|
||||||
|
const c = candidateKeys[id]
|
||||||
|
if (!c) return null
|
||||||
|
for (const k of c) {
|
||||||
|
if (Object.prototype.hasOwnProperty.call(analyses, k)) return k
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
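candidateKeys maps each normalized tab id to the backend keys that different runs may have used, and findKey returns the first candidate actually present in the saved analyses object, so reports saved under legacy key names still render. For example:

```tsx
// Example resolution against a saved analyses object (values shortened).
const saved = { fundamental_analysis: { content: 'md' }, bull_case: { content: 'md' } }
// With `analyses` pointing at `saved`:
//   findKey('fundamentals') -> 'fundamental_analysis'
//   findKey('bullish')      -> 'bull_case'
//   findKey('news')         -> null   (no candidate key saved)
```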
|
|
||||||
|
// 去掉正文开头重复的大标题(Markdown 以 # 开头的行)
|
||||||
|
const stripTopHeadings = (text: string): string => {
|
||||||
|
const lines = String(text || '').split(/\r?\n/)
|
||||||
|
let i = 0
|
||||||
|
while (i < lines.length) {
|
||||||
|
const t = lines[i]?.trim() || ''
|
||||||
|
if (t === '') { i += 1; continue }
|
||||||
|
if (/^#{1,6}\s+/.test(t)) { i += 1; continue }
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return lines.slice(i).join('\n').trimStart()
|
||||||
|
}
|
||||||
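stripTopHeadings trims leading blank lines and any leading Markdown headings (# through ######) so the tab's own h2 title is not duplicated by a heading the model put at the top of its answer. For instance:

```tsx
const a = stripTopHeadings('# 基本面分析\n\n## 概览\n正文第一段')
// -> '正文第一段'   (leading headings and the blank line are dropped)
const b = stripTopHeadings('正文直接开始\n## 小节')
// -> '正文直接开始\n## 小节'   (only headings at the very top are removed)
```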
|
|
||||||
|
return (
|
||||||
|
<div className="space-y-4">
|
||||||
|
<div className="flex items-center justify-between">
|
||||||
|
<h1 className="text-2xl font-semibold">报告详情</h1>
|
||||||
|
<div className="text-sm text-muted-foreground">{new Date(data.createdAt).toLocaleString()}</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-base">基本信息</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="text-sm space-y-1">
|
||||||
|
<div className="flex flex-wrap items-center gap-4">
|
||||||
|
<span>股票代码:<span className="font-medium">{data.symbol}</span></span>
|
||||||
|
{content?.normalizedSymbol && (
|
||||||
|
<span>标准代码:<span className="font-medium">{String(content.normalizedSymbol)}</span></span>
|
||||||
|
)}
|
||||||
|
{(() => {
|
||||||
|
const companyName = (content?.financials?.name as string | undefined) || (content as any)?.company_name || (content as any)?.companyName
|
||||||
|
return companyName ? (
|
||||||
|
<span>公司名称:<span className="font-medium">{companyName}</span></span>
|
||||||
|
) : null
|
||||||
|
})()}
|
||||||
|
{content?.market && (
|
||||||
|
<span>市场:<span className="font-medium">{String(content.market)}</span></span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<Tabs defaultValue={'financial'} className="mt-2">
|
||||||
|
<TabsList className="flex-wrap">
|
||||||
|
{ordered.map((o, idx) => (
|
||||||
|
<TabsTrigger key={o.id} value={o.id}>{`${idx + 1}. ${o.label}`}</TabsTrigger>
|
||||||
|
))}
|
||||||
|
</TabsList>
|
||||||
|
|
||||||
|
<TabsContent value="financial" className="space-y-4">
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-base">财务数据(保存自读取结果)</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-4">
|
||||||
|
{(() => {
|
||||||
|
const fin = (content?.financials ?? null) as null | {
|
||||||
|
ts_code?: string
|
||||||
|
name?: string
|
||||||
|
series?: Record<string, Array<{ year: string; value: number | null; month?: number | null }>>
|
||||||
|
meta?: any
|
||||||
|
}
|
||||||
|
|
||||||
|
const series = fin?.series || {}
|
||||||
|
const allPoints = Object.values(series).flat() as Array<{ year: string; value: number | null; month?: number | null }>
|
||||||
|
const years = Array.from(new Set(allPoints.map(p => p?.year).filter(Boolean) as string[])).sort((a, b) => Number(b) - Number(a))
|
||||||
|
|
||||||
|
const numberFormatter = new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 2, maximumFractionDigits: 2 })
|
||||||
|
const integerFormatter = new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 0, maximumFractionDigits: 0 })
|
||||||
|
|
||||||
|
const metricDisplayMap: Record<string, string> = {
|
||||||
|
roe: 'ROE',
|
||||||
|
roa: 'ROA',
|
||||||
|
roic: 'ROCE/ROIC',
|
||||||
|
grossprofit_margin: '毛利率',
|
||||||
|
netprofit_margin: '净利润率',
|
||||||
|
tr_yoy: '收入增速',
|
||||||
|
dt_netprofit_yoy: '净利润增速',
|
||||||
|
revenue: '收入',
|
||||||
|
n_income: '净利润',
|
||||||
|
n_cashflow_act: '经营现金流',
|
||||||
|
c_pay_acq_const_fiolta: '资本开支',
|
||||||
|
cash_div_tax: '分红',
|
||||||
|
buyback: '回购',
|
||||||
|
total_assets: '总资产',
|
||||||
|
total_hldr_eqy_exc_min_int: '股东权益',
|
||||||
|
goodwill: '商誉',
|
||||||
|
total_mv: '市值',
|
||||||
|
}
|
||||||
|
|
||||||
|
const metricGroupMap: Record<string, string> = {
|
||||||
|
revenue: 'income',
|
||||||
|
n_income: 'income',
|
||||||
|
total_assets: 'balancesheet',
|
||||||
|
total_hldr_eqy_exc_min_int: 'balancesheet',
|
||||||
|
goodwill: 'balancesheet',
|
||||||
|
n_cashflow_act: 'cashflow',
|
||||||
|
c_pay_acq_const_fiolta: 'cashflow',
|
||||||
|
}
|
||||||
|
|
||||||
|
if (years.length === 0) {
|
||||||
|
return (
|
||||||
|
<div className="text-sm text-muted-foreground">
|
||||||
|
暂无保存的财务数据。下次保存报告时会一并保存财务数据。
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentYearStr = String(new Date().getFullYear())
|
||||||
|
const getQuarter = (month: number | null | undefined) => {
|
||||||
|
if (month == null) return null
|
||||||
|
return Math.floor((month - 1) / 3) + 1
|
||||||
|
}
|
||||||
|
|
||||||
|
const PERCENT_KEYS = new Set(['roe','roa','roic','grossprofit_margin','netprofit_margin','tr_yoy','dt_netprofit_yoy'])
|
||||||
|
|
||||||
|
const ORDER: Array<{ key: string; label?: string; kind?: 'computed' }> = [
|
||||||
|
{ key: 'roe' },
|
||||||
|
{ key: 'roa' },
|
||||||
|
{ key: 'roic' },
|
||||||
|
{ key: 'grossprofit_margin' },
|
||||||
|
{ key: 'netprofit_margin' },
|
||||||
|
{ key: 'revenue' },
|
||||||
|
{ key: 'tr_yoy' },
|
||||||
|
{ key: 'n_income' },
|
||||||
|
{ key: 'dt_netprofit_yoy' },
|
||||||
|
{ key: 'n_cashflow_act' },
|
||||||
|
{ key: 'c_pay_acq_const_fiolta' },
|
||||||
|
{ key: '__free_cash_flow', label: '自由现金流', kind: 'computed' },
|
||||||
|
{ key: 'cash_div_tax', label: '分红' },
|
||||||
|
{ key: 'buyback', label: '回购' },
|
||||||
|
{ key: 'total_assets' },
|
||||||
|
{ key: 'total_hldr_eqy_exc_min_int' },
|
||||||
|
{ key: 'goodwill' },
|
||||||
|
]
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="overflow-x-auto">
|
||||||
|
<Table className="min-w-full text-sm">
|
||||||
|
<TableHeader>
|
||||||
|
<TableRow>
|
||||||
|
<TableHead className="text-left p-2">指标</TableHead>
|
||||||
|
{years.map((y) => {
|
||||||
|
const yearData = allPoints.find(p => p.year === y)
|
||||||
|
const isCurrent = y === currentYearStr
|
||||||
|
const quarter = yearData?.month ? getQuarter(yearData.month) : null
|
||||||
|
const label = isCurrent && quarter ? `${y} Q${quarter}` : y
|
||||||
|
return <TableHead key={y} className="text-right p-2">{label}</TableHead>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
</TableHeader>
|
||||||
|
<TableBody>
|
||||||
|
{(() => {
|
||||||
|
const summaryRow = (
|
||||||
|
<TableRow key="__main_metrics_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">主要指标</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
|
||||||
|
const rows = ORDER.map(({ key, label, kind }) => {
|
||||||
|
const isComputed = kind === 'computed' && key === '__free_cash_flow'
|
||||||
|
const points = series[key] as Array<{ year?: string; value?: number | null }>|undefined
|
||||||
|
const operating = series['n_cashflow_act'] as Array<{ year?: string; value?: number | null }>|undefined
|
||||||
|
const capex = series['c_pay_acq_const_fiolta'] as Array<{ year?: string; value?: number | null }>|undefined
|
||||||
|
return (
|
||||||
|
<TableRow key={key} className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">{label || metricDisplayMap[key] || key}</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
let v: number | null | undefined = undefined
|
||||||
|
if (isComputed) {
|
||||||
|
const op = operating?.find(p => p?.year === y)?.value ?? null
|
||||||
|
const cp = capex?.find(p => p?.year === y)?.value ?? null
|
||||||
|
v = (op == null || cp == null) ? null : (Number(op) - Number(cp))
|
||||||
|
} else {
|
||||||
|
v = points?.find(p => p?.year === y)?.value ?? null
|
||||||
|
}
|
||||||
|
|
||||||
|
const groupName = metricGroupMap[key]
|
||||||
|
const rawNum = typeof v === 'number' ? v : (v == null ? null : Number(v))
|
||||||
|
if (rawNum == null || Number.isNaN(rawNum)) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
if (PERCENT_KEYS.has(key)) {
|
||||||
|
const perc = Math.abs(rawNum) <= 1 ? rawNum * 100 : rawNum
|
||||||
|
const text = Number.isFinite(perc) ? numberFormatter.format(perc) : '-'
|
||||||
|
const isGrowthRow = key === 'tr_yoy' || key === 'dt_netprofit_yoy'
|
||||||
|
if (isGrowthRow) {
|
||||||
|
const isNeg = typeof perc === 'number' && perc < 0
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className="text-right p-2">
|
||||||
|
<span className={isNeg ? 'text-red-600 bg-red-100 italic' : 'text-blue-600 italic'}>{text}%</span>
|
||||||
|
</TableCell>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (key === 'roe' || key === 'roic') {
|
||||||
|
const highlight = typeof perc === 'number' && perc > 12
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className={`text-right p-2 ${highlight ? 'bg-green-200' : ''}`}>{`${text}%`}</TableCell>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return <TableCell key={y} className="text-right p-2">{`${text}%`}</TableCell>
|
||||||
|
} else {
|
||||||
|
const isFinGroup = groupName === 'income' || groupName === 'balancesheet' || groupName === 'cashflow'
|
||||||
|
const scaled = key === 'total_mv' ? rawNum / 10000 : (isFinGroup || isComputed ? rawNum / 1e8 : rawNum)
|
||||||
|
const formatter = key === 'total_mv' ? integerFormatter : numberFormatter
|
||||||
|
const text = Number.isFinite(scaled) ? formatter.format(scaled) : '-'
|
||||||
|
if (key === '__free_cash_flow') {
|
||||||
|
const isNeg = typeof scaled === 'number' && scaled < 0
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className="text-right p-2">{isNeg ? <span className="text-red-600 bg-red-100">{text}</span> : text}</TableCell>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return <TableCell key={y} className="text-right p-2">{text}</TableCell>
|
||||||
|
}
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
})
|
||||||
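Most money rows hold raw CNY in the saved series and are divided by 1e8 so the table shows 亿元, while total_mv arrives in 万元 and is divided by 10 000. The one computed row, 自由现金流, is derived per year as operating cash flow (n_cashflow_act) minus capital expenditure (c_pay_acq_const_fiolta) and then scaled the same way. A worked example of that per-year cell:

```tsx
// Worked example of the computed free-cash-flow cell for one year.
const operatingCashFlow = 5_300_000_000   // n_cashflow_act, CNY
const capex = 1_200_000_000               // c_pay_acq_const_fiolta, CNY
const fcf = operatingCashFlow - capex     // 4_100_000_000 CNY
const displayed = fcf / 1e8               // 41.00 亿元, formatted with two decimals
// A negative fcf is rendered in red, matching the table logic above.
```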
|
|
||||||
|
const getVal = (arr: Array<{ year?: string; value?: number | null }> | undefined, y: string) => {
|
||||||
|
const v = arr?.find(p => p?.year === y)?.value
|
||||||
|
return typeof v === 'number' ? v : (v == null ? null : Number(v))
|
||||||
|
}
|
||||||
|
|
||||||
|
// 费用指标
|
||||||
|
const feeHeaderRow = (
|
||||||
|
<TableRow key="__fee_metrics_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">费用指标</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const feeRows = [
|
||||||
|
{ key: '__sell_rate', label: '销售费用率', num: series['sell_exp'] as any, den: series['revenue'] as any },
|
||||||
|
{ key: '__admin_rate', label: '管理费用率', num: series['admin_exp'] as any, den: series['revenue'] as any },
|
||||||
|
{ key: '__rd_rate', label: '研发费用率', num: series['rd_exp'] as any, den: series['revenue'] as any },
|
||||||
|
{ key: '__other_fee_rate', label: '其他费用率', num: undefined, den: series['revenue'] as any },
|
||||||
|
{ key: '__tax_rate', label: '所得税率', num: series['tax_to_ebt'] as any, den: undefined },
|
||||||
|
{ key: '__depr_ratio', label: '折旧费用占比', num: series['depr_fa_coga_dpba'] as any, den: series['revenue'] as any },
|
||||||
|
].map(({ key, label, num, den }) => (
|
||||||
|
<TableRow key={key} className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
let rate: number | null = null
|
||||||
|
if (key === '__tax_rate') {
|
||||||
|
const numerator = getVal(num, y)
|
||||||
|
if (numerator == null || Number.isNaN(numerator)) {
|
||||||
|
rate = null
|
||||||
|
} else if (Math.abs(numerator) <= 1) {
|
||||||
|
rate = numerator * 100
|
||||||
|
} else {
|
||||||
|
rate = numerator
|
||||||
|
}
|
||||||
|
} else if (key === '__other_fee_rate') {
|
||||||
|
const gpRaw = getVal(series['grossprofit_margin'] as any, y)
|
||||||
|
const npRaw = getVal(series['netprofit_margin'] as any, y)
|
||||||
|
const rev = getVal(series['revenue'] as any, y)
|
||||||
|
const sell = getVal(series['sell_exp'] as any, y)
|
||||||
|
const admin = getVal(series['admin_exp'] as any, y)
|
||||||
|
const rd = getVal(series['rd_exp'] as any, y)
|
||||||
|
if (gpRaw == null || npRaw == null || rev == null || rev === 0 || sell == null || admin == null || rd == null) {
|
||||||
|
rate = null
|
||||||
|
} else {
|
||||||
|
const gp = Math.abs(gpRaw) <= 1 ? gpRaw * 100 : gpRaw
|
||||||
|
const np = Math.abs(npRaw) <= 1 ? npRaw * 100 : npRaw
|
||||||
|
const sellRate = (sell / rev) * 100
|
||||||
|
const adminRate = (admin / rev) * 100
|
||||||
|
const rdRate = (rd / rev) * 100
|
||||||
|
rate = gp - np - sellRate - adminRate - rdRate
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const numerator = getVal(num, y)
|
||||||
|
const denominator = getVal(den, y)
|
||||||
|
if (numerator == null || denominator == null || denominator === 0) {
|
||||||
|
rate = null
|
||||||
|
} else {
|
||||||
|
rate = (numerator / denominator) * 100
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (rate == null || !Number.isFinite(rate)) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const rateText = numberFormatter.format(rate)
|
||||||
|
const isNegative = rate < 0
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className="text-right p-2">
|
||||||
|
{isNegative ? <span className="text-red-600 bg-red-100">{rateText}%</span> : `${rateText}%`}
|
||||||
|
</TableCell>
|
||||||
|
)
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
))
|
||||||
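The fee rows divide each expense series by revenue; 所得税率 uses tax_to_ebt directly (multiplied by 100 only when it arrives as a fraction), and 其他费用率 is a residual: gross margin minus net margin minus the three explicit expense rates. A worked example of the residual, with all inputs expressed as percentages of revenue:

```tsx
// Worked example of 其他费用率 as a residual.
const grossMargin = 45.0   // %
const netMargin   = 12.0   // %
const sellRate    = 15.0   // 销售费用 / revenue, %
const adminRate   =  6.0   // 管理费用 / revenue, %
const rdRate      =  5.0   // 研发费用 / revenue, %
const otherFeeRate = grossMargin - netMargin - sellRate - adminRate - rdRate
// -> 7.0 %: finance costs, taxes, and other items not captured by the
//    three explicit expense lines end up in this residual.
```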
|
|
||||||
|
// 资产占比
|
||||||
|
const assetHeaderRow = (
|
||||||
|
<TableRow key="__asset_ratio_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">资产占比</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const ratioCell = (value: number | null, y: string) => {
|
||||||
|
if (value == null || !Number.isFinite(value)) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const text = numberFormatter.format(value)
|
||||||
|
const isNegative = value < 0
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className="text-right p-2">
|
||||||
|
{isNegative ? <span className="text-red-600 bg-red-100">{text}%</span> : `${text}%`}
|
||||||
|
</TableCell>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const assetRows = [
|
||||||
|
{ key: '__money_cap_ratio', label: '现金占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['money_cap'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__inventories_ratio', label: '库存占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['inventories'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__ar_ratio', label: '应收款占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['accounts_receiv_bill'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__prepay_ratio', label: '预付款占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['prepayment'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__fix_assets_ratio', label: '固定资产占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['fix_assets'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__lt_invest_ratio', label: '长期投资占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['lt_eqt_invest'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__goodwill_ratio', label: '商誉占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['goodwill'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__other_assets_ratio', label: '其他资产占比', calc: (y: string) => {
|
||||||
|
const total = getVal(series['total_assets'] as any, y)
|
||||||
|
if (total == null || total === 0) return null
|
||||||
|
const parts = [
|
||||||
|
getVal(series['money_cap'] as any, y) || 0,
|
||||||
|
getVal(series['inventories'] as any, y) || 0,
|
||||||
|
getVal(series['accounts_receiv_bill'] as any, y) || 0,
|
||||||
|
getVal(series['prepayment'] as any, y) || 0,
|
||||||
|
getVal(series['fix_assets'] as any, y) || 0,
|
||||||
|
getVal(series['lt_eqt_invest'] as any, y) || 0,
|
||||||
|
getVal(series['goodwill'] as any, y) || 0,
|
||||||
|
]
|
||||||
|
const sumKnown = parts.reduce((acc: number, v: number) => acc + v, 0)
|
||||||
|
return ((total - sumKnown) / total) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__ap_ratio', label: '应付款占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['accounts_pay'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__adv_ratio', label: '预收款占比', calc: (y: string) => {
|
||||||
|
const adv = getVal(series['adv_receipts'] as any, y) || 0
|
||||||
|
const contractLiab = getVal(series['contract_liab'] as any, y) || 0
|
||||||
|
const num = adv + contractLiab
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__st_borr_ratio', label: '短期借款占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['st_borr'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__lt_borr_ratio', label: '长期借款占比', calc: (y: string) => {
|
||||||
|
const num = getVal(series['lt_borr'] as any, y)
|
||||||
|
const den = getVal(series['total_assets'] as any, y)
|
||||||
|
return num == null || den == null || den === 0 ? null : (num / den) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__interest_bearing_debt_ratio', label: '有息负债率', calc: (y: string) => {
|
||||||
|
const total = getVal(series['total_assets'] as any, y)
|
||||||
|
if (total == null || total === 0) return null
|
||||||
|
const st = getVal(series['st_borr'] as any, y) || 0
|
||||||
|
const lt = getVal(series['lt_borr'] as any, y) || 0
|
||||||
|
return ((st + lt) / total) * 100
|
||||||
|
} },
|
||||||
|
{ key: '__operating_assets_ratio', label: '运营资产占比', calc: (y: string) => {
|
||||||
|
const total = getVal(series['total_assets'] as any, y)
|
||||||
|
if (total == null || total === 0) return null
|
||||||
|
const inv = getVal(series['inventories'] as any, y) || 0
|
||||||
|
const ar = getVal(series['accounts_receiv_bill'] as any, y) || 0
|
||||||
|
const pre = getVal(series['prepayment'] as any, y) || 0
|
||||||
|
const ap = getVal(series['accounts_pay'] as any, y) || 0
|
||||||
|
const adv = getVal(series['adv_receipts'] as any, y) || 0
|
||||||
|
const contractLiab = getVal(series['contract_liab'] as any, y) || 0
|
||||||
|
const operating = inv + ar + pre - ap - adv - contractLiab
|
||||||
|
return (operating / total) * 100
|
||||||
|
} },
|
||||||
|
].map(({ key, label, calc }) => (
|
||||||
|
<TableRow key={key} className={`hover:bg-purple-100 ${key === '__other_assets_ratio' ? 'bg-yellow-50' : ''}`}>
|
||||||
|
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
|
||||||
|
{years.map((y) => ratioCell(calc(y), y))}
|
||||||
|
</TableRow>
|
||||||
|
))
|
||||||
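其他资产占比 is also a residual: total assets minus the seven itemised asset series (cash, inventories, receivables, prepayments, fixed assets, long-term equity investments, goodwill), expressed as a share of total assets. Missing items are coerced to zero via || 0, so the residual silently absorbs anything the provider did not return. For example:

```tsx
// Worked example of 其他资产占比 (other assets as a share of total assets).
const totalAssets = 100_000_000_000
const knownParts = [
  20_000_000_000,  // money_cap
  8_000_000_000,   // inventories
  12_000_000_000,  // accounts_receiv_bill
  1_000_000_000,   // prepayment
  30_000_000_000,  // fix_assets
  4_000_000_000,   // lt_eqt_invest
  5_000_000_000,   // goodwill
]
const sumKnown = knownParts.reduce((acc, v) => acc + v, 0)               // 80e9
const otherAssetsRatio = ((totalAssets - sumKnown) / totalAssets) * 100  // 20 %
```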
|
|
||||||
|
// 周转能力
|
||||||
|
const turnoverHeaderRow = (
|
||||||
|
<TableRow key="__turnover_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">周转能力</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const getYearNumber = (ys: string) => {
|
||||||
|
const n = Number(ys)
|
||||||
|
return Number.isFinite(n) ? n : null
|
||||||
|
}
|
||||||
|
const getPoint = (arr: Array<{ year?: string; value?: number | null }> | undefined, year: string) => {
|
||||||
|
return arr?.find(p => p?.year === year)?.value ?? null
|
||||||
|
}
|
||||||
|
const getAvg = (arr: Array<{ year?: string; value?: number | null }> | undefined, year: string) => {
|
||||||
|
const curr = getPoint(arr, year)
|
||||||
|
const yNum = getYearNumber(year)
|
||||||
|
const prevYear = yNum != null ? String(yNum - 1) : null
|
||||||
|
const prev = prevYear ? getPoint(arr, prevYear) : null
|
||||||
|
const c = typeof curr === 'number' ? curr : (curr == null ? null : Number(curr))
|
||||||
|
const p = typeof prev === 'number' ? prev : (prev == null ? null : Number(prev))
|
||||||
|
if (c == null) return null
|
||||||
|
if (p == null) return c
|
||||||
|
return (c + p) / 2
|
||||||
|
}
|
||||||
|
const getMarginRatio = (year: string) => {
|
||||||
|
const gmRaw = getPoint(series['grossprofit_margin'] as any, year)
|
||||||
|
if (gmRaw == null) return null
|
||||||
|
const gmNum = typeof gmRaw === 'number' ? gmRaw : Number(gmRaw)
|
||||||
|
if (!Number.isFinite(gmNum)) return null
|
||||||
|
return Math.abs(gmNum) <= 1 ? gmNum : gmNum / 100
|
||||||
|
}
|
||||||
|
const getRevenue = (year: string) => {
|
||||||
|
const rev = getPoint(series['revenue'] as any, year)
|
||||||
|
const r = typeof rev === 'number' ? rev : (rev == null ? null : Number(rev))
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
const getCOGS = (year: string) => {
|
||||||
|
const rev = getRevenue(year)
|
||||||
|
const gm = getMarginRatio(year)
|
||||||
|
if (rev == null || gm == null) return null
|
||||||
|
const cogs = rev * (1 - gm)
|
||||||
|
return Number.isFinite(cogs) ? cogs : null
|
||||||
|
}
|
||||||
|
const turnoverItems: Array<{ key: string; label: string }> = [
|
||||||
|
{ key: 'invturn_days', label: '存货周转天数' },
|
||||||
|
{ key: 'arturn_days', label: '应收款周转天数' },
|
||||||
|
{ key: 'payturn_days', label: '应付款周转天数' },
|
||||||
|
{ key: 'fa_turn', label: '固定资产周转率' },
|
||||||
|
{ key: 'assets_turn', label: '总资产周转率' },
|
||||||
|
]
|
||||||
|
const turnoverRows = turnoverItems.map(({ key, label }) => (
|
||||||
|
<TableRow key={key} className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">{label}</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
let value: number | null = null
|
||||||
|
if (key === 'payturn_days') {
|
||||||
|
const avgAP = getAvg(series['accounts_pay'] as any, y)
|
||||||
|
const cogs = getCOGS(y)
|
||||||
|
value = avgAP == null || cogs == null || cogs === 0 ? null : (365 * avgAP) / cogs
|
||||||
|
} else {
|
||||||
|
const arr = series[key] as Array<{ year?: string; value?: number | null }> | undefined
|
||||||
|
const v = arr?.find(p => p?.year === y)?.value ?? null
|
||||||
|
const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
|
||||||
|
value = num == null || Number.isNaN(num) ? null : num
|
||||||
|
}
|
||||||
|
if (value == null || !Number.isFinite(value)) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const text = numberFormatter.format(value)
|
||||||
|
if (key === 'arturn_days' && value > 90) {
|
||||||
|
return (
|
||||||
|
<TableCell key={y} className="text-right p-2 bg-red-100 text-red-600">{text}</TableCell>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return <TableCell key={y} className="text-right p-2">{text}</TableCell>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
))
|
||||||
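The turnover block mostly shows provider-supplied day and turn metrics as-is; 应付款周转天数 is the exception and is derived locally: average accounts payable over the current and prior year, divided by an estimated COGS reconstructed from revenue and gross margin (COGS = revenue × (1 − gm)), times 365. A worked example:

```tsx
// Worked example of 应付款周转天数 (days payable outstanding).
const apCurrent = 3_000_000_000
const apPrior   = 2_600_000_000
const avgAP = (apCurrent + apPrior) / 2          // 2.8e9
const revenue = 20_000_000_000
const grossMargin = 0.40                         // stored as a fraction here
const cogs = revenue * (1 - grossMargin)         // 1.2e10
const daysPayable = (365 * avgAP) / cogs         // ≈ 85.2 days
// When the prior-year figure is missing, getAvg falls back to the current value.
```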
|
|
||||||
|
// 人均效率
|
||||||
|
const perCapitaHeaderRow = (
|
||||||
|
<TableRow key="__per_capita_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">人均效率</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const employeesRow = (
|
||||||
|
<TableRow key="__employees_row" className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">员工人数</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
const v = getVal(series['employees'] as any, y)
|
||||||
|
if (v == null || !Number.isFinite(v)) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
return <TableCell key={y} className="text-right p-2">{integerFormatter.format(Math.round(v))}</TableCell>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const revPerEmpRow = (
|
||||||
|
<TableRow key="__rev_per_emp_row" className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">人均创收(万元)</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
const rev = getVal(series['revenue'] as any, y)
|
||||||
|
const emp = getVal(series['employees'] as any, y)
|
||||||
|
if (rev == null || emp == null || emp === 0) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const val = (rev / emp) / 10000
|
||||||
|
return <TableCell key={y} className="text-right p-2">{numberFormatter.format(val)}</TableCell>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const profitPerEmpRow = (
|
||||||
|
<TableRow key="__profit_per_emp_row" className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">人均创利(万元)</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
const prof = getVal(series['n_income'] as any, y)
|
||||||
|
const emp = getVal(series['employees'] as any, y)
|
||||||
|
if (prof == null || emp == null || emp === 0) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const val = (prof / emp) / 10000
|
||||||
|
return <TableCell key={y} className="text-right p-2">{numberFormatter.format(val)}</TableCell>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const salaryPerEmpRow = (
|
||||||
|
<TableRow key="__salary_per_emp_row" className="hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 text-muted-foreground">人均工资(万元)</TableCell>
|
||||||
|
{years.map((y) => {
|
||||||
|
const salaryPaid = getVal(series['c_paid_to_for_empl'] as any, y)
|
||||||
|
const emp = getVal(series['employees'] as any, y)
|
||||||
|
if (salaryPaid == null || emp == null || emp === 0) {
|
||||||
|
return <TableCell key={y} className="text-right p-2">-</TableCell>
|
||||||
|
}
|
||||||
|
const val = (salaryPaid / emp) / 10000
|
||||||
|
return <TableCell key={y} className="text-right p-2">{numberFormatter.format(val)}</TableCell>
|
||||||
|
})}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
|
||||||
|
// 市场表现
|
||||||
|
const marketHeaderRow = (
|
||||||
|
<TableRow key="__market_perf_row" className="bg-muted hover:bg-purple-100">
|
||||||
|
<TableCell className="p-2 font-medium ">市场表现</TableCell>
|
||||||
|
{years.map((y) => (
|
||||||
|
<TableCell key={y} className="p-2"></TableCell>
|
||||||
|
))}
|
||||||
|
</TableRow>
|
||||||
|
)
|
||||||
|
const priceRow = (
|
||||||
|
<TableRow key="__price_row" className="hover:bg-purple-100">
|
||||||
|
    <TableCell className="p-2 text-muted-foreground">股价</TableCell>
    {years.map((y) => {
      const arr = series['close'] as Array<{ year?: string; value?: number | null }> | undefined
      const v = arr?.find(p => p?.year === y)?.value ?? null
      const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
      if (num == null || !Number.isFinite(num)) return <TableCell key={y} className="text-right p-2">-</TableCell>
      return <TableCell key={y} className="text-right p-2">{numberFormatter.format(num)}</TableCell>
    })}
  </TableRow>
)

const marketCapRow = (
  <TableRow key="__market_cap_row" className="hover:bg-purple-100">
    <TableCell className="p-2 text-muted-foreground">市值(亿元)</TableCell>
    {years.map((y) => {
      const arr = series['total_mv'] as Array<{ year?: string; value?: number | null }> | undefined
      const v = arr?.find(p => p?.year === y)?.value ?? null
      const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
      if (num == null || !Number.isFinite(num)) return <TableCell key={y} className="text-right p-2">-</TableCell>
      const scaled = num / 10000
      return <TableCell key={y} className="text-right p-2">{integerFormatter.format(Math.round(scaled))}</TableCell>
    })}
  </TableRow>
)

const peRow = (
  <TableRow key="__pe_row" className="hover:bg-purple-100">
    <TableCell className="p-2 text-muted-foreground">PE</TableCell>
    {years.map((y) => {
      const arr = series['pe'] as Array<{ year?: string; value?: number | null }> | undefined
      const v = arr?.find(p => p?.year === y)?.value ?? null
      const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
      if (num == null || !Number.isFinite(num)) return <TableCell key={y} className="text-right p-2">-</TableCell>
      return <TableCell key={y} className="text-right p-2">{numberFormatter.format(num)}</TableCell>
    })}
  </TableRow>
)

const pbRow = (
  <TableRow key="__pb_row" className="hover:bg-purple-100">
    <TableCell className="p-2 text-muted-foreground">PB</TableCell>
    {years.map((y) => {
      const arr = series['pb'] as Array<{ year?: string; value?: number | null }> | undefined
      const v = arr?.find(p => p?.year === y)?.value ?? null
      const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
      if (num == null || !Number.isFinite(num)) return <TableCell key={y} className="text-right p-2">-</TableCell>
      return <TableCell key={y} className="text-right p-2">{numberFormatter.format(num)}</TableCell>
    })}
  </TableRow>
)

const holderNumRow = (
  <TableRow key="__holder_num_row" className="hover:bg-purple-100">
    <TableCell className="p-2 text-muted-foreground">股东户数</TableCell>
    {years.map((y) => {
      const arr = series['holder_num'] as Array<{ year?: string; value?: number | null }> | undefined
      const v = arr?.find(p => p?.year === y)?.value ?? null
      const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
      if (num == null || !Number.isFinite(num)) return <TableCell key={y} className="text-right p-2">-</TableCell>
      return <TableCell key={y} className="text-right p-2">{integerFormatter.format(Math.round(num))}</TableCell>
    })}
  </TableRow>
)

return [
  summaryRow,
  ...rows,
  feeHeaderRow,
  ...feeRows,
  assetHeaderRow,
  ...assetRows,
  turnoverHeaderRow,
  ...turnoverRows,
  perCapitaHeaderRow,
  employeesRow,
  revPerEmpRow,
  profitPerEmpRow,
  salaryPerEmpRow,
  marketHeaderRow,
  priceRow,
  marketCapRow,
  peRow,
  pbRow,
  holderNumRow,
]
})()}
</TableBody>
</Table>
</div>
)
})()}

</CardContent>
</Card>
</TabsContent>

<TabsContent value="meta" className="space-y-4">
  <Card>
    <CardHeader>
      <CardTitle className="text-base">元数据(数据库原始记录)</CardTitle>
    </CardHeader>
    <CardContent>
      <pre className="text-xs leading-relaxed overflow-auto">
        {JSON.stringify(data, null, 2)}
      </pre>
    </CardContent>
  </Card>
</TabsContent>

{ordered.filter(o => o.id !== 'financial' && o.id !== 'meta').map((o) => {
  const key = findKey(o.id)
  const item = key ? analyses[key] || {} : {}
  const md = stripTopHeadings(String(item?.content || ''))
  const err = item?.error as string | undefined
  return (
    <TabsContent key={o.id} value={o.id} className="space-y-3">
      {err && <div className="text-sm text-red-600">{err}</div>}
      <div className="border rounded-lg p-6 bg-card">
        <article className="markdown-body" style={{
          boxSizing: 'border-box', minWidth: '200px', maxWidth: '980px', margin: '0 auto', padding: 0
        }}>
          <h2 className="text-lg font-medium mb-3">{o.label}</h2>
          <ReactMarkdown remarkPlugins={[remarkGfm]}>
            {md}
          </ReactMarkdown>
        </article>
      </div>
    </TabsContent>
  )
})}
</Tabs>
</div>
)
}
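Note: the cells and tabs above lean on helpers defined earlier in this page component and not shown in this hunk: numberFormatter, integerFormatter, and stripTopHeadings. A minimal TypeScript sketch of what they could look like; the locale, fraction digits, and heading-stripping rule are assumptions, not taken from this diff:

// Hypothetical sketches; the real definitions live earlier in the component file.
const numberFormatter = new Intl.NumberFormat('zh-CN', {
  minimumFractionDigits: 2,
  maximumFractionDigits: 2,
})
const integerFormatter = new Intl.NumberFormat('zh-CN', {
  maximumFractionDigits: 0,
})

// Drop leading markdown headings so the tab's own <h2> label is not duplicated.
function stripTopHeadings(md: string): string {
  return md.replace(/^(\s*#{1,3}[^\n]*\n+)+/, '').trim()
}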
@@ -1,48 +1,60 @@
-import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
-import { Badge } from "@/components/ui/badge";
+import Link from 'next/link'
+import { headers } from 'next/headers'

+async function fetchReports(baseUrl: string) {
+  const url = `${baseUrl}/api/reports?limit=50`
+  const resp = await fetch(url, { cache: 'no-store' })
+  if (!resp.ok) {
+    return { items: [], total: 0 }
+  }
+  return resp.json() as Promise<{ items: Array<{ id: string; symbol: string; createdAt: string; content?: any }>; total: number }>
+}
+
-export default function ReportsPage() {
+export default async function ReportsPage() {
+  const h = await headers()
+  const host = h.get('x-forwarded-host') || h.get('host') || 'localhost:3000'
+  const proto = h.get('x-forwarded-proto') || 'http'
+  const base = process.env.NEXT_PUBLIC_BASE_URL || `${proto}://${host}`
+  const { items, total } = await fetchReports(base)
+
   return (
-    <div className="space-y-6">
-      <header className="space-y-2">
-        <h1 className="text-2xl font-semibold">报表中心</h1>
-        <p className="text-sm text-muted-foreground">查看与管理财务报表与分析结果。</p>
-      </header>
-
-      <div className="grid gap-4 sm:grid-cols-2 lg:grid-cols-3">
-        <Card>
-          <CardHeader>
-            <CardTitle>利润表</CardTitle>
-            <CardDescription>收入、成本、净利润</CardDescription>
-          </CardHeader>
-          <CardContent className="space-x-2">
-            <Badge variant="outline">季度</Badge>
-            <Badge variant="secondary">年度</Badge>
-          </CardContent>
-        </Card>
-
-        <Card>
-          <CardHeader>
-            <CardTitle>资产负债表</CardTitle>
-            <CardDescription>资产、负债、权益</CardDescription>
-          </CardHeader>
-          <CardContent className="space-x-2">
-            <Badge variant="outline">结构</Badge>
-            <Badge variant="secondary">趋势</Badge>
-          </CardContent>
-        </Card>
-
-        <Card>
-          <CardHeader>
-            <CardTitle>现金流量表</CardTitle>
-            <CardDescription>经营、投资、筹资</CardDescription>
-          </CardHeader>
-          <CardContent className="space-x-2">
-            <Badge variant="outline">自由现金流</Badge>
-            <Badge variant="secondary">质量</Badge>
-          </CardContent>
-        </Card>
+    <div className="space-y-4">
+      <div className="flex items-center justify-between">
+        <h1 className="text-2xl font-semibold">历史分析报告</h1>
+        <div className="text-sm text-muted-foreground">共 {total} 条</div>
       </div>

+      {items.length === 0 ? (
+        <p className="text-sm text-muted-foreground">暂无报告</p>
+      ) : (
+        <div className="overflow-x-auto border rounded-md">
+          <table className="min-w-full text-sm">
+            <thead>
+              <tr className="bg-muted">
+                <th className="text-left p-3">股票代码</th>
+                <th className="text-left p-3">公司名称</th>
+                <th className="text-left p-3">创建时间</th>
+                <th className="text-right p-3">操作</th>
+              </tr>
+            </thead>
+            <tbody>
+              {items.map((r) => {
+                const name = (r as any)?.content?.financials?.name || (r as any)?.content?.company_name || ''
+                return (
+                  <tr key={r.id} className="border-t hover:bg-muted/50">
+                    <td className="p-3 font-medium">{r.symbol}</td>
+                    <td className="p-3">{name || <span className="text-muted-foreground">-</span>}</td>
+                    <td className="p-3">{new Date(r.createdAt).toLocaleString()}</td>
+                    <td className="p-3 text-right">
+                      <Link href={`/reports/${r.id}`} className="text-primary hover:underline">查看</Link>
+                    </td>
+                  </tr>
+                )
+              })}
+            </tbody>
+          </table>
+        </div>
+      )}
     </div>
-  );
+  )
 }
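The rewritten ReportsPage resolves its own origin from the forwarded headers so the server-side fetch keeps working behind a reverse proxy, with NEXT_PUBLIC_BASE_URL as an explicit override. If other server components need the same logic, it could be factored into a small helper; a sketch under that assumption (not part of this commit):

import { headers } from 'next/headers'

// Hypothetical helper: resolve the base URL for server-side fetches.
export async function getRequestBaseUrl(): Promise<string> {
  if (process.env.NEXT_PUBLIC_BASE_URL) return process.env.NEXT_PUBLIC_BASE_URL
  const h = await headers()
  const host = h.get('x-forwarded-host') || h.get('host') || 'localhost:3000'
  const proto = h.get('x-forwarded-proto') || 'http'
  return `${proto}://${host}`
}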
13
frontend/src/lib/prisma.ts
Normal file
@@ -0,0 +1,13 @@
import { PrismaClient } from '@prisma/client'

const globalForPrisma = global as unknown as { prisma?: PrismaClient }

export const prisma =
  globalForPrisma.prisma ||
  new PrismaClient({
    log: ['error', 'warn']
  })

if (process.env.NODE_ENV !== 'production') globalForPrisma.prisma = prisma
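This singleton keeps one PrismaClient across Next.js hot reloads in development, avoiding connection exhaustion. The /api/reports endpoint that the reports page queries is not shown in this section; a minimal sketch of such a route handler, assuming a Prisma model named report with id, symbol, createdAt, and a JSON content column (model and field names are assumptions, not confirmed by this diff):

// Hypothetical route: frontend/src/app/api/reports/route.ts
import { NextResponse } from 'next/server'
import { prisma } from '@/lib/prisma'

export async function GET(req: Request) {
  const { searchParams } = new URL(req.url)
  const limit = Math.min(Number(searchParams.get('limit') ?? 20) || 20, 100)

  // Fetch the newest reports plus a total count in parallel.
  const [items, total] = await Promise.all([
    prisma.report.findMany({
      orderBy: { createdAt: 'desc' },
      take: limit,
      select: { id: true, symbol: true, createdAt: true, content: true },
    }),
    prisma.report.count(),
  ])

  return NextResponse.json({ items, total })
}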
@@ -13,9 +13,12 @@ BACKEND_DIR="$REPO_ROOT/backend"
 FRONTEND_DIR="$REPO_ROOT/frontend"
 CONFIG_FILE="$REPO_ROOT/config/config.json"

+# Guard to ensure cleanup runs only once
+__CLEANED_UP=0
+
 # Port configuration
 BACKEND_PORT=8000
-FRONTEND_PORT=3000
+FRONTEND_PORT=3001

 # Kill process using specified port
 kill_port() {
@@ -70,8 +73,10 @@ run_backend() {
   ensure_backend
   cd "$BACKEND_DIR"
   # Run and colorize output (avoid stdbuf on macOS)
-  UVICORN_CMD=(uvicorn app.main:app --reload --port "$BACKEND_PORT")
-  "${UVICORN_CMD[@]}" 2>&1 | awk -v p="[BACKEND]" -v color="$GREEN" -v reset="$RESET" '{print color p " " $0 reset}'
+  UVICORN_CMD=(uvicorn app.main:app --reload --port "$BACKEND_PORT" --log-level info)
+  "${UVICORN_CMD[@]}" 2>&1 | while IFS= read -r line; do
+    printf "%b[%s] [BACKEND] %s%b\n" "$GREEN" "$(date '+%Y-%m-%d %H:%M:%S')" "$line" "$RESET"
+  done
 }

 ensure_frontend() {
@@ -85,27 +90,70 @@ ensure_frontend() {
 run_frontend() {
   ensure_frontend
   cd "$FRONTEND_DIR"
-  npm run dev 2>&1 | awk -v p="[FRONTEND]" -v color="$CYAN" -v reset="$RESET" '{print color p " " $0 reset}'
+  npm run dev 2>&1 | while IFS= read -r line; do
+    printf "%b[%s] [FRONTEND] %s%b\n" "$CYAN" "$(date '+%Y-%m-%d %H:%M:%S')" "$line" "$RESET"
+  done
+}
+
+# Recursively kill a process tree (children first), with optional signal (default TERM)
+kill_tree() {
+  local pid="$1"
+  local signal="${2:-TERM}"
+  if [[ -z "${pid:-}" ]]; then
+    return
+  fi
+  # Kill children first
+  local children
+  children=$(pgrep -P "$pid" 2>/dev/null || true)
+  if [[ -n "${children:-}" ]]; then
+    for child in $children; do
+      kill_tree "$child" "$signal"
+    done
+  fi
+  # Then the parent
+  kill -"$signal" "$pid" 2>/dev/null || true
 }

 cleanup() {
+  # Ensure this runs only once even if multiple signals (INT/TERM/EXIT) arrive
+  if [[ $__CLEANED_UP -eq 1 ]]; then
+    return
+  fi
+  __CLEANED_UP=1
+
   echo -e "\n${YELLOW}[CLEANUP]${RESET} Stopping services..."

-  # Kill process groups to ensure all child processes are terminated
+  # Gracefully stop trees for backend and frontend, then escalate if needed
   if [[ -n "${BACKEND_PID:-}" ]]; then
-    kill -TERM -"$BACKEND_PID" 2>/dev/null || kill "$BACKEND_PID" 2>/dev/null || true
+    kill_tree "$BACKEND_PID" TERM
   fi
   if [[ -n "${FRONTEND_PID:-}" ]]; then
-    kill -TERM -"$FRONTEND_PID" 2>/dev/null || kill "$FRONTEND_PID" 2>/dev/null || true
+    kill_tree "$FRONTEND_PID" TERM
   fi

-  sleep 1
-
-  # Force kill any remaining processes on these ports
+  # Wait up to ~3s for graceful shutdown
+  for _ in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15; do
+    local backend_alive=0 frontend_alive=0
+    if [[ -n "${BACKEND_PID:-}" ]] && kill -0 "$BACKEND_PID" 2>/dev/null; then backend_alive=1; fi
+    if [[ -n "${FRONTEND_PID:-}" ]] && kill -0 "$FRONTEND_PID" 2>/dev/null; then frontend_alive=1; fi
+    if [[ $backend_alive -eq 0 && $frontend_alive -eq 0 ]]; then
+      break
+    fi
+    sleep 0.2
+  done
+
+  # Escalate to KILL if still alive
+  if [[ -n "${BACKEND_PID:-}" ]] && kill -0 "$BACKEND_PID" 2>/dev/null; then
+    kill_tree "$BACKEND_PID" KILL
+  fi
+  if [[ -n "${FRONTEND_PID:-}" ]] && kill -0 "$FRONTEND_PID" 2>/dev/null; then
+    kill_tree "$FRONTEND_PID" KILL
+  fi
+
+  # As a final safeguard, free the ports
   kill_port "$BACKEND_PORT"
   kill_port "$FRONTEND_PORT"

-  wait 2>/dev/null || true
   echo -e "${GREEN}[CLEANUP]${RESET} All services stopped."
 }

@@ -116,8 +164,8 @@ main() {
   kill_port "$BACKEND_PORT"
   kill_port "$FRONTEND_PORT"

-  echo -e "${GREEN}[BACKEND]${RESET} API: http://127.0.0.1:$BACKEND_PORT"
-  echo -e "${CYAN}[FRONTEND]${RESET} APP: http://127.0.0.1:$FRONTEND_PORT\n"
+  echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] [BACKEND]${RESET} API: http://127.0.0.1:$BACKEND_PORT"
+  echo -e "${CYAN}[$(date '+%Y-%m-%d %H:%M:%S')] [FRONTEND]${RESET} APP: http://127.0.0.1:$FRONTEND_PORT\n"

   run_backend & BACKEND_PID=$!
   run_frontend & FRONTEND_PID=$!
@@ -10,7 +10,7 @@ import json
 # 添加项目根目录到Python路径
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))

-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient


 async def test_employees_data():

@@ -11,7 +11,7 @@ from datetime import datetime, timedelta
 # 添加项目根目录到Python路径
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))

-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient


 async def test_holder_number_data():

@@ -11,7 +11,7 @@ from datetime import datetime, timedelta
 # 添加项目根目录到Python路径
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))

-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient


 async def test_holder_num_processing():

@@ -9,7 +9,7 @@ import json
 # 添加 backend 目录到 Python 路径
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "backend"))

-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient

 async def test_tax_to_ebt():
     # 读取配置获取 token
41
scripts/tushare_legacy_client.py
Normal file
@@ -0,0 +1,41 @@
import sys
import os
import asyncio
from typing import Any, Dict, List, Optional

# Add backend to path to import TushareProvider
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "backend"))
from app.data_providers.tushare import TushareProvider


class TushareLegacyClient:
    """
    An adapter to mimic the old TushareClient for legacy scripts,
    but uses the new TushareProvider under the hood.
    """
    def __init__(self, token: str):
        if not token:
            raise ValueError("Token must be provided.")
        self.provider = TushareProvider(token=token)

    async def query(
        self,
        api_name: str,
        params: Optional[Dict[str, Any]] = None,
        fields: Optional[str] = None,  # Note: fields are not used in the new provider's _query
    ) -> List[Dict[str, Any]]:
        """
        Mimics the .query() method by calling the provider's internal _query method.
        """
        # The new _query method is protected, but we call it here for the script's sake.
        return await self.provider._query(api_name=api_name, params=params, fields=fields)

    async def aclose(self):
        """Mimic aclose to allow 'async with' syntax."""
        if hasattr(self.provider, '_client') and self.provider._client:
            await self.provider._client.aclose()

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        await self.aclose()