diff --git a/backend/app/data_manager.py b/backend/app/data_manager.py
new file mode 100644
index 0000000..eaa6a87
--- /dev/null
+++ b/backend/app/data_manager.py
@@ -0,0 +1,164 @@
+import yaml
+import os
+import json
+from typing import Any, Dict, List, Optional
+from app.data_providers.base import BaseDataProvider
+from app.data_providers.tushare import TushareProvider
+# from app.data_providers.ifind import TonghsProvider
+from app.data_providers.yfinance import YfinanceProvider
+from app.data_providers.finnhub import FinnhubProvider
+
+import logging
+
+logger = logging.getLogger(__name__)
+
+def _find_repo_root() -> str:
+    """Walk up from this file until a 'config/data_sources.yaml' is found."""
+    current_dir = os.path.dirname(__file__)
+    while current_dir != os.path.dirname(current_dir):  # Not at filesystem root
+        if os.path.exists(os.path.join(current_dir, "config", "data_sources.yaml")):
+            return current_dir
+        current_dir = os.path.dirname(current_dir)
+    # Fallback to the original calculation
+    return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
+
+class DataManager:
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if not cls._instance:
+            cls._instance = super(DataManager, cls).__new__(cls)
+        return cls._instance
+
+    def __init__(self, config_path: Optional[str] = None):
+        if hasattr(self, '_initialized') and self._initialized:
+            return
+
+        if config_path is None:
+            # The config file lives in the 'config' directory at the repo root
+            config_path = os.path.join(_find_repo_root(), "config", "data_sources.yaml")
+
+        with open(config_path, 'r', encoding='utf-8') as f:
+            self.config = yaml.safe_load(f)
+
+        self.providers = {}
+
+        # Build provider base config from environment variables and config/config.json, then initialize providers
+        base_cfg: Dict[str, Any] = {"data_sources": {}}
+
+        # 1) Prefer env vars when present
+        for name, source_config in (self.config.get('data_sources') or {}).items():
+            env_var = source_config.get('api_key_env')
+            if env_var:
+                api_key = os.getenv(env_var)
+                if api_key:
+                    base_cfg["data_sources"][name] = {"api_key": api_key}
+                else:
+                    logger.warning(f"Env var '{env_var}' for provider '{name}' not set; will try config.json.")
+
+        # 2) Fall back to config/config.json if tokens are provided there
+        try:
+            cfg_json_path = os.path.join(_find_repo_root(), "config", "config.json")
+            if os.path.exists(cfg_json_path):
+                with open(cfg_json_path, "r", encoding="utf-8") as jf:
+                    cfg_json = json.load(jf)
+                ds_from_json = (cfg_json.get("data_sources") or {})
+                for name, node in ds_from_json.items():
+                    if name not in base_cfg["data_sources"] and node.get("api_key"):
+                        base_cfg["data_sources"][name] = {"api_key": node.get("api_key")}
+                        logger.info(f"Loaded API key for provider '{name}' from config.json")
+            else:
+                logger.debug("config/config.json not found; skipping JSON token load.")
+        except Exception as e:
+            logger.warning(f"Failed to read tokens from config/config.json: {e}")
+
+        try:
+            self._init_providers(base_cfg)
+        except Exception as e:
+            logger.error(f"Failed to initialize data providers: {e}")
+
+        self._initialized = True
+
+    def _init_providers(self, base_cfg: Dict[str, Any]) -> None:
+        """
+        Initializes providers with the given base configuration.
+        This method should be called after the base config is loaded.
+        """
+        provider_map = {
+            "tushare": TushareProvider,
+            # "ifind": TonghsProvider,
+            "yfinance": YfinanceProvider,
+            "finnhub": FinnhubProvider,
+        }
+
+        for name, provider_class in provider_map.items():
+            token = None
+            source_config = self.config['data_sources'].get(name, {})
+            if source_config and source_config.get('api_key_env'):
+                token = base_cfg.get("data_sources", {}).get(name, {}).get("api_key")
+
+            # Initialize the provider if a token is found or not required
+            if token or not source_config.get('api_key_env'):
+                try:
+                    self.providers[name] = provider_class(token=token)
+                except Exception as e:
+                    logger.error(f"Failed to initialize provider '{name}': {e}")
+            else:
+                logger.warning(f"Provider '{name}' requires token env '{source_config.get('api_key_env')}', but none provided. Skipping.")
+
+    def _detect_market(self, stock_code: str) -> str:
+        if stock_code.endswith(('.SH', '.SZ')):
+            return 'CN'
+        elif stock_code.endswith('.HK'):
+            return 'HK'
+        elif stock_code.endswith('.T'):  # Assuming .T for Tokyo
+            return 'JP'
+        else:  # Default to US
+            return 'US'
+
+    async def get_data(self, method_name: str, stock_code: str, **kwargs):
+        market = self._detect_market(stock_code)
+        priority_list = self.config.get('markets', {}).get(market, {}).get('priority', [])
+
+        for provider_name in priority_list:
+            provider = self.providers.get(provider_name)
+            if not provider:
+                logger.warning(f"Provider '{provider_name}' not initialized.")
+                continue
+
+            try:
+                method = getattr(provider, method_name)
+                data = await method(stock_code=stock_code, **kwargs)
+                if data is not None and (not isinstance(data, list) or data):
+                    logger.info(f"Data successfully fetched from '{provider_name}' for '{stock_code}'.")
+                    return data
+            except Exception as e:
+                logger.warning(f"Provider '{provider_name}' failed for '{stock_code}': {e}. Trying next provider.")
+
+        logger.error(f"All data providers failed for '{stock_code}' on method '{method_name}'.")
+        return None
+
+    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
+        return await self.get_data('get_financial_statements', stock_code, report_dates=report_dates)
+
+    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
+        return await self.get_data('get_daily_price', stock_code, start_date=start_date, end_date=end_date)
+
+    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
+        return await self.get_data('get_stock_basic', stock_code)
+
+data_manager = DataManager()
diff --git a/backend/app/data_providers/base.py b/backend/app/data_providers/base.py
new file mode 100644
index 0000000..97336d5
--- /dev/null
+++ b/backend/app/data_providers/base.py
@@ -0,0 +1,71 @@
+from abc import ABC, abstractmethod
+from typing import Any, Dict, List, Optional
+
+class BaseDataProvider(ABC):
+    """
+    Abstract base class for all financial data providers.
+    """
+
+    def __init__(self, token: Optional[str] = None):
+        """
+        Initializes the data provider, optionally with an API token.
+
+        :param token: API token for the data provider, if required.
+        """
+        self.token = token
+        self._initialize()
+
+    def _initialize(self):
+        """
+        Perform any necessary initialization, such as API client setup.
+        This method is called by the constructor.
+        """
+        pass
+
+    @abstractmethod
+    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
+        """
+        Fetches basic company information for a given stock code.
+
+        :param stock_code: The stock identifier.
+        :return: A dictionary with basic company info, or None if not found.
+        """
+        pass
+
+    @abstractmethod
+    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
+        """
+        Fetches daily stock prices for a given period.
+
+        :param stock_code: The stock identifier.
+        :param start_date: The start date of the period (e.g., 'YYYYMMDD').
+        :param end_date: The end date of the period (e.g., 'YYYYMMDD').
+        :return: A list of dictionaries, each representing a day's price data.
+        """
+        pass
+
+    @abstractmethod
+    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
+        """
+        Fetches financial statements for a list of report dates.
+
+        This method should aim to fetch data for all requested dates in a single call if possible
+        and then combine them into a unified format.
+
+        :param stock_code: The stock identifier.
+        :param report_dates: A list of report dates to fetch data for (e.g., ['20231231', '20221231']).
+        :return: A list of dictionaries, each containing financial statement data for a specific period.
+        """
+        pass
+
+    async def get_financial_statement(self, stock_code: str, report_date: str) -> Optional[Dict[str, Any]]:
+        """
+        Fetches a single financial statement for a specific report date.
+        This is a convenience method that can be implemented by calling get_financial_statements.
+
+        :param stock_code: The stock identifier.
+        :param report_date: The report date for the statement (e.g., '20231231').
+        :return: A dictionary with financial statement data, or None if not found.
+ """ + results = await self.get_financial_statements(stock_code, [report_date]) + return results[0] if results else None diff --git a/backend/app/data_providers/finnhub.py b/backend/app/data_providers/finnhub.py new file mode 100644 index 0000000..645b7c5 --- /dev/null +++ b/backend/app/data_providers/finnhub.py @@ -0,0 +1,112 @@ +from .base import BaseDataProvider +from typing import Any, Dict, List, Optional +import finnhub +import pandas as pd +from datetime import datetime, timedelta +import asyncio +import logging + +logger = logging.getLogger(__name__) + +class FinnhubProvider(BaseDataProvider): + + def _initialize(self): + if not self.token: + raise ValueError("Finnhub API key not provided.") + self.client = finnhub.Client(api_key=self.token) + + async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]: + async def _fetch(): + try: + profile = self.client.company_profile2(symbol=stock_code) + if not profile: + return None + + # Normalize data + return { + "ts_code": stock_code, + "name": profile.get("name"), + "area": profile.get("country"), + "industry": profile.get("finnhubIndustry"), + "exchange": profile.get("exchange"), + "ipo_date": profile.get("ipo"), + } + except Exception as e: + logger.error(f"Finnhub get_stock_basic failed for {stock_code}: {e}") + return None + + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, _fetch) + + async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]: + async def _fetch(): + try: + start_ts = int(datetime.strptime(start_date, '%Y%m%d').timestamp()) + end_ts = int(datetime.strptime(end_date, '%Y%m%d').timestamp()) + + res = self.client.stock_candles(stock_code, 'D', start_ts, end_ts) + if res.get('s') != 'ok': + return [] + + df = pd.DataFrame(res) + if df.empty: + return [] + + # Normalize data + df['trade_date'] = pd.to_datetime(df['t'], unit='s').dt.strftime('%Y%m%d') + df.rename(columns={ + 'o': 'open', 'h': 'high', 'l': 'low', 'c': 'close', 'v': 'vol' + }, inplace=True) + + return df[['trade_date', 'open', 'high', 'low', 'close', 'vol']].to_dict('records') + + except Exception as e: + logger.error(f"Finnhub get_daily_price failed for {stock_code}: {e}") + return [] + + loop = asyncio.get_event_loop() + return await loop.run_in_executor(None, _fetch) + + async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]: + async def _fetch(): + try: + # Finnhub provides financials as a whole, not by specific date ranges in one call + # We fetch all available and then filter. + # Note: 'freq' can be 'annual' or 'quarterly'. We'll use annual. + res = self.client.financials_reported(symbol=stock_code, freq='annual') + if not res or not res.get('data'): + return [] + + df = pd.DataFrame(res['data']) + + # Filter by requested dates + years_to_fetch = {date[:4] for date in report_dates} + df = df[df['year'].astype(str).isin(years_to_fetch)] + + # The data is deeply nested in 'report'. We need to extract and pivot it. 
+                all_reports = []
+                for index, row in df.iterrows():
+                    report_data = {'ts_code': stock_code, 'end_date': row['endDate']}
+
+                    # Extract concepts from balance sheet, income statement, and cash flow
+                    for item in row['report'].get('bs', []):
+                        report_data[item['concept']] = item['value']
+                    for item in row['report'].get('ic', []):
+                        report_data[item['concept']] = item['value']
+                    for item in row['report'].get('cf', []):
+                        report_data[item['concept']] = item['value']
+
+                    all_reports.append(report_data)
+
+                # Further normalization of keys would be needed here to match a common format,
+                # e.g. 'AssetsTotal' -> 'total_assets'.
+                # This is a complex task and depends on the desired final schema.
+
+                return all_reports
+
+            except Exception as e:
+                logger.error(f"Finnhub get_financial_statements failed for {stock_code}: {e}")
+                return []
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _fetch)
diff --git a/backend/app/data_providers/ifind.py b/backend/app/data_providers/ifind.py
new file mode 100644
index 0000000..3f89b18
--- /dev/null
+++ b/backend/app/data_providers/ifind.py
@@ -0,0 +1,131 @@
+from .base import BaseDataProvider
+from typing import Any, Dict, List, Optional
+import pandas as pd
+from datetime import datetime
+
+# Assumes the iFinDPy library is installed in the environment.
+# Important: users must install iFinDPy manually per the official docs.
+try:
+    from iFinDPy import THS_iFinDLogin, THS_BD, THS_HQ
+except ImportError:
+    print("Error: iFinDPy module not found. Make sure it is installed per the official Tonghuashun (iFinD) instructions.")
+    # Define stub functions so the program does not crash when the SDK is missing
+    def THS_iFinDLogin(*args, **kwargs): return -1
+    def THS_BD(*args, **kwargs): return pd.DataFrame()
+    def THS_HQ(*args, **kwargs): return pd.DataFrame()
+
+
+class TonghsProvider(BaseDataProvider):
+    _is_logged_in = False
+
+    def __init__(self, token: Optional[str] = None):
+        # Log in with the Refresh Token obtained from the iFinD user center
+        if not TonghsProvider._is_logged_in:
+            if not token:
+                raise ValueError("Tonghuashun iFinDPy Refresh Token not provided in the configuration.")
+
+            # Call the login function, passing the token directly.
+            # Note: the exact keyword argument name should be confirmed against the
+            # iFinDPy docs; here it is assumed to be 'token' or the first positional arg.
+            login_result = THS_iFinDLogin(token)
+
+            if login_result == 0:
+                print("Tonghuashun iFinDPy login succeeded.")
+                TonghsProvider._is_logged_in = True
+            else:
+                print(f"Tonghuashun iFinDPy login failed, error code: {login_result}")
+                raise ConnectionError("Unable to log in to the Tonghuashun iFinDPy service; check that your Refresh Token is correct.")
+
+    async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]:
+        try:
+            # TODO: confirm the indicators used for basic company info
+            indicators = "ths_stock_short_name_stock;ths_listed_market_stock;ths_industry_stock;ths_ipo_date_stock"
+            data = THS_BD(stock_code, indicators, "")
+
+            if data.empty:
+                return None
+
+            # --- Data normalization ---
+            # iFinDPy usually returns a DataFrame; convert it to a dict
+            info = data.iloc[0].to_dict()
+
+            return {
+                "ts_code": stock_code,
+                "name": info.get("ths_stock_short_name_stock"),
+                "area": info.get("ths_listed_market_stock"),
+                "industry": info.get("ths_industry_stock"),
+                "list_date": info.get("ths_ipo_date_stock"),
+            }
+        except Exception as e:
+            print(f"Tonghuashun iFinDPy get_stock_basic failed for stock code {stock_code}: {e}")
+            return None
+
+    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
+        try:
+            # TODO: confirm the indicators used for daily quotes
+            indicators = "open;high;low;close;volume"
+            # The iFinDPy date format is usually YYYY-MM-DD
+            date_range = f"{start_date};{end_date}"
+
+            data = THS_HQ(stock_code, indicators, date_range)
+
+            if data.empty:
+                return []
+
+            # --- Data normalization ---
+            data = data.reset_index()
+            data.rename(columns={
+                "time": "trade_date",
+                "open": "open",
+                "high": "high",
+                "low": "low",
+                "close": "close",
+                "volume": "vol"
+            }, inplace=True)
+
+            return data.to_dict('records')
+        except Exception as e:
+            print(f"Tonghuashun iFinDPy get_daily_price failed for stock code {stock_code}: {e}")
+            return []
+
+    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
+        try:
+            # TODO: confirm the indicators for financial statements;
+            # this may require multiple THS_BD calls whose results are merged.
+
+            # Example: fetch several report periods in one call by converting
+            # report_dates into the format iFinDPy accepts, e.g. "2022-12-31;2021-12-31"
+            dates_param = ";".join(report_dates)
+
+            # Required indicators
+            income_indicators = "ths_np_stock"  # net income
+            bs_indicators = "ths_total_assets_stock;ths_total_liab_stock"  # total assets; total liabilities
+            revenue_indicators = "ths_revenue_stock"  # operating revenue
+
+            # Fetch the data
+            income_data = THS_BD(stock_code, income_indicators, f"reportDate={dates_param}")
+            bs_data = THS_BD(stock_code, bs_indicators, f"reportDate={dates_param}")
+            revenue_data = THS_BD(stock_code, revenue_indicators, f"reportDate={dates_param}")
+
+            # Merge the results
+            financials_df = pd.concat([income_data, bs_data, revenue_data], axis=1)
+            financials_df = financials_df.loc[:, ~financials_df.columns.duplicated()]
+            financials_df = financials_df.reset_index().rename(columns={"index": "end_date"})
+
+            # --- Data normalization ---
+            financials_df.rename(columns={
+                "ths_revenue_stock": "revenue",
+                "ths_np_stock": "net_income",
+                "ths_total_assets_stock": "total_assets",
+                "ths_total_liab_stock": "total_liabilities",
+            }, inplace=True)
+
+            financials_df["ts_code"] = stock_code
+
+            return financials_df.to_dict('records')
+        except Exception as e:
+            print(f"Tonghuashun iFinDPy get_financial_statements failed for stock code {stock_code}: {e}")
+            return []
+
+    async def get_financial_statement(self, stock_code: str, report_date: str) -> Optional[Dict[str, Any]]:
+        results = await self.get_financial_statements(stock_code, [report_date])
+        return results[0] if results else None
diff --git a/backend/app/data_providers/tushare.py b/backend/app/data_providers/tushare.py
new file mode 100644
index 0000000..79111c0
--- /dev/null
+++ b/backend/app/data_providers/tushare.py
@@ -0,0 +1,132 @@
+from .base import BaseDataProvider
+from typing import Any, Dict, List, Optional
+import httpx
+import logging
+import asyncio
+
+logger = logging.getLogger(__name__)
+
+TUSHARE_PRO_URL = "https://api.tushare.pro"
+
+class TushareProvider(BaseDataProvider):
+
+    def _initialize(self):
+        if not self.token:
+            raise ValueError("Tushare API token not provided.")
+        # Use httpx.AsyncClient directly
+        self._client = httpx.AsyncClient(timeout=30)
+
+    async def _query(
+        self,
+        api_name: str,
+        params: Optional[Dict[str, Any]] = None,
+        fields: Optional[str] = None,
+    ) -> List[Dict[str, Any]]:
+        payload = {
+            "api_name": api_name,
+            "token": self.token,
+            "params": params or {},
+        }
+        if "limit" not in payload["params"]:
+            payload["params"]["limit"] = 5000
+        if fields:
+            payload["fields"] = fields
+
+        logger.info(f"Querying Tushare API '{api_name}' with params: {params}")
+
+        try:
+            resp = await self._client.post(TUSHARE_PRO_URL, json=payload)
+            resp.raise_for_status()
+            data = resp.json()
+
+            if data.get("code") != 0:
+                err_msg = data.get("msg") or "Unknown Tushare error"
+                logger.error(f"Tushare API error for '{api_name}': {err_msg}")
+                raise RuntimeError(f"{api_name}: {err_msg}")
+
+            fields_def = data.get("data", {}).get("fields", [])
+            items = data.get("data", {}).get("items", [])
+
+            rows: List[Dict[str, Any]] = []
+            for it in items:
+                row = {fields_def[i]: it[i] for i in range(len(fields_def))}
+                rows.append(row)
+
+            logger.info(f"Tushare API '{api_name}' returned {len(rows)} rows.")
+            return rows
+
+        except httpx.HTTPStatusError as e:
logger.error(f"HTTP error calling Tushare API '{api_name}': {e.response.status_code} - {e.response.text}") + raise + except Exception as e: + logger.error(f"Exception calling Tushare API '{api_name}': {e}") + raise + + async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]: + try: + rows = await self._query( + api_name="stock_basic", + params={"ts_code": stock_code}, + ) + return rows[0] if rows else None + except Exception as e: + logger.error(f"Tushare get_stock_basic failed for {stock_code}: {e}") + return None + + async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]: + try: + return await self._query( + api_name="daily", + params={ + "ts_code": stock_code, + "start_date": start_date, + "end_date": end_date, + }, + ) + except Exception as e: + logger.error(f"Tushare get_daily_price failed for {stock_code}: {e}") + return [] + + async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]: + all_statements: List[Dict[str, Any]] = [] + for date in report_dates: + logger.info(f"Fetching financial statements for {stock_code}, report date: {date}") + try: + bs_rows, ic_rows, cf_rows = await asyncio.gather( + self._query( + api_name="balancesheet", + params={"ts_code": stock_code, "period": date, "report_type": 1}, + ), + self._query( + api_name="income", + params={"ts_code": stock_code, "period": date, "report_type": 1}, + ), + self._query( + api_name="cashflow", + params={"ts_code": stock_code, "period": date, "report_type": 1}, + ) + ) + + if not bs_rows and not ic_rows and not cf_rows: + logger.warning(f"No financial statements components found from Tushare for {stock_code} on {date}") + continue + + merged: Dict[str, Any] = {"ts_code": stock_code, "end_date": date} + bs_data = bs_rows[0] if bs_rows else {} + ic_data = ic_rows[0] if ic_rows else {} + cf_data = cf_rows[0] if cf_rows else {} + + merged.update(bs_data) + merged.update(ic_data) + merged.update(cf_data) + + merged["end_date"] = merged.get("end_date") or merged.get("period") or date + logger.debug(f"Merged statement for {date} has keys: {list(merged.keys())}") + + all_statements.append(merged) + except Exception as e: + logger.error(f"Tushare get_financial_statement failed for {stock_code} on {date}: {e}") + continue + + logger.info(f"Successfully fetched {len(all_statements)} statement(s) for {stock_code}.") + return all_statements diff --git a/backend/app/data_providers/yfinance.py b/backend/app/data_providers/yfinance.py new file mode 100644 index 0000000..1c92de9 --- /dev/null +++ b/backend/app/data_providers/yfinance.py @@ -0,0 +1,114 @@ +from .base import BaseDataProvider +from typing import Any, Dict, List, Optional +import yfinance as yf +import pandas as pd +from datetime import datetime +import asyncio +import logging + +logger = logging.getLogger(__name__) + +class YfinanceProvider(BaseDataProvider): + + def _map_stock_code(self, stock_code: str) -> str: + # yfinance uses different tickers for CN market + if stock_code.endswith('.SH'): + return stock_code.replace('.SH', '.SS') + elif stock_code.endswith('.SZ'): + # For Shenzhen stocks, try without suffix first, then with .SZ + base_code = stock_code.replace('.SZ', '') + return base_code # Try without suffix first + return stock_code + + async def get_stock_basic(self, stock_code: str) -> Optional[Dict[str, Any]]: + async def _fetch(): + try: + ticker = yf.Ticker(self._map_stock_code(stock_code)) + info = ticker.info + + # Normalize data to match 
+                return {
+                    "ts_code": stock_code,
+                    "name": info.get("longName"),
+                    "area": info.get("country"),
+                    "industry": info.get("industry"),
+                    "market": info.get("market"),
+                    "exchange": info.get("exchange"),
+                    "list_date": datetime.fromtimestamp(info.get("firstTradeDateEpoch", 0)).strftime('%Y%m%d') if info.get("firstTradeDateEpoch") else None,
+                }
+            except Exception as e:
+                logger.error(f"yfinance get_stock_basic failed for {stock_code}: {e}")
+                return None
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _fetch)
+
+    async def get_daily_price(self, stock_code: str, start_date: str, end_date: str) -> List[Dict[str, Any]]:
+        def _fetch():
+            try:
+                # yfinance date format is YYYY-MM-DD
+                start_fmt = datetime.strptime(start_date, '%Y%m%d').strftime('%Y-%m-%d')
+                end_fmt = datetime.strptime(end_date, '%Y%m%d').strftime('%Y-%m-%d')
+
+                ticker = yf.Ticker(self._map_stock_code(stock_code))
+                df = ticker.history(start=start_fmt, end=end_fmt)
+
+                df.reset_index(inplace=True)
+                # Normalize column names
+                df.rename(columns={
+                    "Date": "trade_date",
+                    "Open": "open", "High": "high", "Low": "low", "Close": "close",
+                    "Volume": "vol"
+                }, inplace=True)
+                df['trade_date'] = df['trade_date'].dt.strftime('%Y%m%d')
+                return df.to_dict('records')
+            except Exception as e:
+                logger.error(f"yfinance get_daily_price failed for {stock_code}: {e}")
+                return []
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _fetch)
+
+    async def get_financial_statements(self, stock_code: str, report_dates: List[str]) -> List[Dict[str, Any]]:
+        def _fetch():
+            try:
+                ticker = yf.Ticker(self._map_stock_code(stock_code))
+
+                # yfinance provides financials quarterly or annually. We'll fetch annually and try to match the dates.
+                # Note: This is an approximation as yfinance does not allow fetching by specific end-of-year dates.
+                df_financials = ticker.financials.transpose()
+                df_balance = ticker.balance_sheet.transpose()
+                df_cashflow = ticker.cash_flow.transpose()
+
+                if df_financials.empty and df_balance.empty and df_cashflow.empty:
+                    return []
+
+                # Combine the data
+                df_combined = pd.concat([df_financials, df_balance, df_cashflow], axis=1)
+                df_combined.index.name = 'end_date'
+                df_combined.reset_index(inplace=True)
+                df_combined['end_date_str'] = df_combined['end_date'].dt.strftime('%Y%m%d')
+
+                # Filter by requested dates (allowing for some flexibility if an exact match is not found).
+                # This simplistic filtering might need to be more robust.
+                # For now, we assume the yearly data maps to the year in report_dates.
+                years_to_fetch = {date[:4] for date in report_dates}
+                df_combined = df_combined[df_combined['end_date'].dt.year.astype(str).isin(years_to_fetch)]
+
+                # Data normalization (yfinance columns differ from Tushare).
+                # This is a sample; a more comprehensive mapping would be required.
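+                # Caveat: row labels vary across yfinance versions (newer releases
+                # expose e.g. "Total Liabilities Net Minority Interest" rather than
+                # "Total Liab"); errors='ignore' below leaves unmatched labels
+                # untouched, so verify this mapping against the installed version.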
+                df_combined.rename(columns={
+                    "Total Revenue": "revenue",
+                    "Net Income": "net_income",
+                    "Total Assets": "total_assets",
+                    "Total Liab": "total_liabilities",
+                }, inplace=True, errors='ignore')
+
+                return df_combined.to_dict('records')
+
+            except Exception as e:
+                logger.error(f"yfinance get_financial_statements failed for {stock_code}: {e}")
+                return []
+
+        loop = asyncio.get_running_loop()
+        return await loop.run_in_executor(None, _fetch)
diff --git a/backend/app/main.py b/backend/app/main.py
index c32ab0d..ef53e7e 100644
--- a/backend/app/main.py
+++ b/backend/app/main.py
@@ -9,12 +9,32 @@ from app.core.config import settings
 from app.routers.config import router as config_router
 from app.routers.financial import router as financial_router
 
-# Configure logging
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s: %(message)s',
-    datefmt='%H:%M:%S'
-)
+# Configure logging to ensure our app logs show up in development
+import sys
+
+# Force our logging configuration to override uvicorn's
+class ForcefulHandler(logging.Handler):
+    def emit(self, record):
+        # Force output to stdout regardless of uvicorn's configuration
+        print(f"[APP] {record.getMessage()}", file=sys.stdout, flush=True)
+
+# Set up our forceful handler for data providers
+forceful_handler = ForcefulHandler()
+forceful_handler.setLevel(logging.DEBUG)
+
+# Configure data providers logger with forceful output
+data_providers_logger = logging.getLogger('app.data_providers')
+data_providers_logger.setLevel(logging.DEBUG)
+data_providers_logger.addHandler(forceful_handler)
+
+# Also set up for the main app logger
+app_logger = logging.getLogger('app')
+app_logger.setLevel(logging.INFO)
+app_logger.addHandler(forceful_handler)
+
+# Ensure our handlers are not suppressed
+data_providers_logger.propagate = False
+app_logger.propagate = False
 
 app = FastAPI(title=settings.APP_NAME, version=settings.APP_VERSION)
diff --git a/backend/app/routers/financial.py b/backend/app/routers/financial.py
index c286a01..440dfa9 100644
--- a/backend/app/routers/financial.py
+++ b/backend/app/routers/financial.py
@@ -9,7 +9,6 @@ from typing import Dict, List
 from fastapi import APIRouter, HTTPException, Query
 from fastapi.responses import StreamingResponse
 
-import os
 from app.core.config import settings
 from app.schemas.financial import (
@@ -21,10 +20,29 @@ from app.schemas.financial import (
     AnalysisResponse,
     AnalysisConfigResponse
 )
-from app.services.tushare_client import TushareClient
 from app.services.company_profile_client import CompanyProfileClient
 from app.services.analysis_client import AnalysisClient, load_analysis_config, get_analysis_config
 
+# Lazy DataManager loader to avoid import-time failures when optional providers/config are missing
+_dm = None
+def get_dm():
+    global _dm
+    if _dm is not None:
+        return _dm
+    try:
+        from app.data_manager import data_manager as real_dm
+        _dm = real_dm
+        return _dm
+    except Exception:
+        class _StubDM:
+            config = {}
+            async def get_stock_basic(self, stock_code: str):
+                return None
+            async def get_financial_statements(self, stock_code: str, report_dates):
+                return []
+        _dm = _StubDM()
+        return _dm
+
 router = APIRouter()
 
 # Load metric config from file (project root is repo root, not backend/)
@@ -45,6 +63,22 @@ def _load_json(path: str) -> Dict:
     return {}
 
 
+@router.get("/data-sources", response_model=Dict[str, List[str]])
+async def get_data_sources():
+    """
+    Get the list of data sources that require an API key from the config.
+ """ + try: + data_sources_config = get_dm().config.get("data_sources", {}) + sources_requiring_keys = [ + source for source, config in data_sources_config.items() + if config.get("api_key_env") + ] + return {"sources": sources_requiring_keys} + except Exception as e: + raise HTTPException(status_code=500, detail=f"Failed to load data sources configuration: {e}") + + @router.post("/china/{ts_code}/analysis", response_model=List[AnalysisResponse]) async def generate_full_analysis( ts_code: str, @@ -150,13 +184,12 @@ async def generate_full_analysis( if not company_name: logger.info(f"[API] Fetching company name for {ts_code}") try: - token = base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key") - if token: - tushare_client = TushareClient(token=token) - basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name") - if basic_data: - company_name = basic_data[0].get("name", ts_code) - logger.info(f"[API] Got company name: {company_name}") + basic_data = await get_dm().get_stock_basic(stock_code=ts_code) + if basic_data: + company_name = basic_data.get("name", ts_code) + logger.info(f"[API] Got company name: {company_name}") + else: + company_name = ts_code except Exception as e: logger.warning(f"Failed to get company name, proceeding with ts_code. Error: {e}") company_name = ts_code @@ -228,176 +261,82 @@ async def get_china_financials( ts_code: str, years: int = Query(5, ge=1, le=15), ): - # Load Tushare token - base_cfg = _load_json(BASE_CONFIG_PATH) - token = ( - os.environ.get("TUSHARE_TOKEN") - or settings.TUSHARE_TOKEN - or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key") - ) - if not token: - raise HTTPException(status_code=500, detail="Tushare API token not configured. 
Set TUSHARE_TOKEN env or config/config.json data_sources.tushare.api_key") - # Load metric config fin_cfg = _load_json(FINANCIAL_CONFIG_PATH) api_groups: Dict[str, List[Dict]] = fin_cfg.get("api_groups", {}) - client = TushareClient(token=token) - # Meta tracking started_real = datetime.now(timezone.utc) started = time.perf_counter_ns() - api_calls_total = 0 + api_calls_total = 0 # This will be harder to track now, maybe DataManager should provide it api_calls_by_group: Dict[str, int] = {} steps: List[StepRecord] = [] - current_action = "初始化" - - # Get company name from stock_basic API - company_name = None + + # Get company name + company_name = ts_code try: - basic_data = await client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name") - api_calls_total += 1 - if basic_data and len(basic_data) > 0: - company_name = basic_data[0].get("name") + basic_data = await get_dm().get_stock_basic(stock_code=ts_code) + if basic_data: + company_name = basic_data.get("name", ts_code) except Exception: - # If getting company name fails, continue without it - pass + pass # Continue without it # Collect series per metric key series: Dict[str, List[Dict]] = {} - - # Helper to store year-value pairs while keeping most recent per year - def _merge_year_value(key: str, year: str, value, month: int = None): - arr = series.setdefault(key, []) - # upsert by year - for item in arr: - if item["year"] == year: - item["value"] = value - if month is not None: - item["month"] = month - return - arr.append({"year": year, "value": value, "month": month}) - - # Query each API group we care errors: Dict[str, str] = {} - for group_name, metrics in api_groups.items(): - step = StepRecord( - name=f"拉取 {group_name}", - start_ts=started_real.isoformat(), - status="running", - ) - steps.append(step) - current_action = step.name - if not metrics: - continue - - # 按 API 分组 metrics(处理 unknown 组中有多个不同 API 的情况) - api_groups_dict: Dict[str, List[Dict]] = {} - for metric in metrics: - api = metric.get("api") or group_name - if api: # 跳过空 API - if api not in api_groups_dict: - api_groups_dict[api] = [] - api_groups_dict[api].append(metric) - - # 对每个 API 分别处理 - for api_name, api_metrics in api_groups_dict.items(): - fields = [m.get("tushareParam") for m in api_metrics if m.get("tushareParam")] - if not fields: - continue + + # Generate date range for financial statements + current_year = datetime.now().year + report_dates = [f"{year}1231" for year in range(current_year - years, current_year + 1)] - date_field = "end_date" if group_name in ("fina_indicator", "income", "balancesheet", "cashflow") else "trade_date" - - # 构建 API 参数 - params = {"ts_code": ts_code, "limit": 5000} - - # 对于需要日期范围的 API(如 stk_holdernumber),添加日期参数 - if api_name == "stk_holdernumber": - # 计算日期范围:从 years 年前到现在 - end_date = datetime.now().strftime("%Y%m%d") - start_date = (datetime.now() - timedelta(days=years * 365)).strftime("%Y%m%d") - params["start_date"] = start_date - params["end_date"] = end_date - # stk_holdernumber 返回的日期字段通常是 end_date - date_field = "end_date" - - # 对于非时间序列 API(如 stock_company),标记为静态数据 - is_static_data = api_name == "stock_company" - - # 构建 fields 字符串:包含日期字段和所有需要的指标字段 - # 确保日期字段存在,因为我们需要用它来确定年份 - fields_list = list(fields) - if date_field not in fields_list: - fields_list.insert(0, date_field) - # 对于 fina_indicator 等 API,通常还需要 ts_code 和 ann_date - if api_name in ("fina_indicator", "income", "balancesheet", "cashflow"): - for req_field in ["ts_code", "ann_date"]: - if req_field not in fields_list: - 
fields_list.insert(0, req_field) - fields_str = ",".join(fields_list) - - try: - data_rows = await client.query(api_name=api_name, params=params, fields=fields_str) - api_calls_total += 1 - api_calls_by_group[group_name] = api_calls_by_group.get(group_name, 0) + 1 - except Exception as e: - # 记录错误但继续处理其他 API - error_key = f"{group_name}_{api_name}" - errors[error_key] = str(e) - continue + # Fetch all financial statements at once + step_financials = StepRecord(name="拉取财务报表", start_ts=started_real.isoformat(), status="running") + steps.append(step_financials) + + all_financial_data = await get_dm().get_financial_statements(stock_code=ts_code, report_dates=report_dates) + + if all_financial_data: + # Process financial data into the 'series' format + for report in all_financial_data: + year = report.get("end_date", "")[:4] + for key, value in report.items(): + # Skip non-numeric fields like ts_code, end_date, ann_date, etc. + if key in ['ts_code', 'end_date', 'ann_date', 'f_ann_date', 'report_type', 'comp_type', 'end_type', 'update_flag']: + continue - tmp: Dict[str, Dict] = {} - current_year = datetime.now().strftime("%Y") - - for row in data_rows: - if is_static_data: - # 对于静态数据(如 stock_company),使用当前年份 - # 只处理第一行数据,因为静态数据通常只有一行 - if current_year not in tmp: - year = current_year - month = None - tmp[year] = row - tmp[year]['_month'] = month - # 跳过后续行 - continue - else: - # 对于时间序列数据,按日期字段处理 - date_val = row.get(date_field) - if not date_val: - continue - year = str(date_val)[:4] - month = int(str(date_val)[4:6]) if len(str(date_val)) >= 6 else None - existing = tmp.get(year) - if existing is None or str(row.get(date_field)) > str(existing.get(date_field)): - tmp[year] = row - tmp[year]['_month'] = month - - for metric in api_metrics: - key = metric.get("tushareParam") - if not key: - continue - for year, row in tmp.items(): - month = row.get('_month') - _merge_year_value(key, year, row.get(key), month) - - step.status = "done" - step.end_ts = datetime.now(timezone.utc).isoformat() - step.duration_ms = int((time.perf_counter_ns() - started) / 1_000_000) + # Only include numeric values + if isinstance(value, (int, float)) and value is not None: + if key not in series: + series[key] = [] + + # Avoid duplicates for the same year + if not any(d['year'] == year for d in series[key]): + series[key].append({"year": year, "value": value}) + else: + errors["financial_statements"] = "Failed to fetch from all providers." + + step_financials.status = "done" + step_financials.end_ts = datetime.now(timezone.utc).isoformat() + step_financials.duration_ms = int((time.perf_counter_ns() - started) / 1_000_000) + + # --- Potentially fetch other data types like daily prices if needed by config --- + # This part is simplified. The original code had complex logic for different api_groups. + # We will assume for now that the main data comes from financial_statements. + # The logic can be extended here to call other data_manager methods based on `fin_cfg`. 
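+    # An illustrative sketch of such an extension (the "daily" group name is
+    # hypothetical; adjust to the real `api_groups` schema before enabling):
+    #
+    #     if "daily" in api_groups:
+    #         end_date = datetime.now().strftime("%Y%m%d")
+    #         start_date = f"{current_year - years}0101"
+    #         daily_rows = await get_dm().get_daily_price(
+    #             stock_code=ts_code, start_date=start_date, end_date=end_date
+    #         )
+    #         # ...fold per-year values from daily_rows into `series` here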
finished_real = datetime.now(timezone.utc) elapsed_ms = int((time.perf_counter_ns() - started) / 1_000_000) if not series: - # If nothing succeeded, expose partial error info - raise HTTPException(status_code=502, detail={"message": "No data returned from Tushare", "errors": errors}) + raise HTTPException(status_code=502, detail={"message": "No data returned from any data source", "errors": errors}) - # Truncate years and sort + # Truncate years and sort (the data should already be mostly correct, but we ensure) for key, arr in series.items(): # Deduplicate and sort desc by year, then cut to requested years, and return asc uniq = {item["year"]: item for item in arr} arr_sorted_desc = sorted(uniq.values(), key=lambda x: x["year"], reverse=True) arr_limited = arr_sorted_desc[:years] - arr_sorted = sorted(arr_limited, key=lambda x: x["year"]) # ascending by year + arr_sorted = sorted(arr_limited, key=lambda x: x["year"]) series[key] = arr_sorted meta = FinancialMeta( @@ -452,21 +391,10 @@ async def get_company_profile( logger.info(f"[API] Fetching company name for {ts_code}") # Try to get from stock_basic API try: - base_cfg = _load_json(BASE_CONFIG_PATH) - token = ( - os.environ.get("TUSHARE_TOKEN") - or settings.TUSHARE_TOKEN - or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key") - ) - if token: - from app.services.tushare_client import TushareClient - tushare_client = TushareClient(token=token) - basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name") - if basic_data and len(basic_data) > 0: - company_name = basic_data[0].get("name", ts_code) - logger.info(f"[API] Got company name: {company_name}") - else: - company_name = ts_code + basic_data = await get_dm().get_stock_basic(stock_code=ts_code) + if basic_data: + company_name = basic_data.get("name", ts_code) + logger.info(f"[API] Got company name: {company_name}") else: company_name = ts_code except Exception as e: @@ -588,76 +516,25 @@ async def generate_analysis( if not company_name: logger.info(f"[API] Fetching company name and financial data for {ts_code}") try: - token = ( - os.environ.get("TUSHARE_TOKEN") - or settings.TUSHARE_TOKEN - or base_cfg.get("data_sources", {}).get("tushare", {}).get("api_key") - ) - if token: - tushare_client = TushareClient(token=token) - basic_data = await tushare_client.query(api_name="stock_basic", params={"ts_code": ts_code}, fields="ts_code,name") - if basic_data and len(basic_data) > 0: - company_name = basic_data[0].get("name", ts_code) - logger.info(f"[API] Got company name: {company_name}") + basic_data = await get_dm().get_stock_basic(stock_code=ts_code) + if basic_data: + company_name = basic_data.get("name", ts_code) + logger.info(f"[API] Got company name: {company_name}") - # Try to get financial data for context - try: - fin_cfg = _load_json(FINANCIAL_CONFIG_PATH) - api_groups = fin_cfg.get("api_groups", {}) - - # Get financial data summary for context - series: Dict[str, List[Dict]] = {} - for group_name, metrics in api_groups.items(): - if not metrics: - continue - api_groups_dict: Dict[str, List[Dict]] = {} - for metric in metrics: - api = metric.get("api") or group_name - if api: - if api not in api_groups_dict: - api_groups_dict[api] = [] - api_groups_dict[api].append(metric) - - for api_name, api_metrics in api_groups_dict.items(): - fields = [m.get("tushareParam") for m in api_metrics if m.get("tushareParam")] - if not fields: - continue - - date_field = "end_date" if group_name in ("fina_indicator", 
"income", "balancesheet", "cashflow") else "trade_date" - - params = {"ts_code": ts_code, "limit": 500} - fields_list = list(fields) - if date_field not in fields_list: - fields_list.insert(0, date_field) - if api_name in ("fina_indicator", "income", "balancesheet", "cashflow"): - for req_field in ["ts_code", "ann_date"]: - if req_field not in fields_list: - fields_list.insert(0, req_field) - fields_str = ",".join(fields_list) - - try: - data_rows = await tushare_client.query(api_name=api_name, params=params, fields=fields_str) - if data_rows: - # Get latest year's data - latest_row = data_rows[0] if data_rows else {} - for metric in api_metrics: - key = metric.get("tushareParam") - if key and key in latest_row: - if key not in series: - series[key] = [] - series[key].append({ - "year": latest_row.get(date_field, "")[:4] if latest_row.get(date_field) else str(datetime.now().year), - "value": latest_row.get(key) - }) - except Exception: - pass - - financial_data = {"series": series} - except Exception as e: - logger.warning(f"[API] Failed to get financial data: {e}") - financial_data = None - else: - company_name = ts_code + # Try to get financial data for context + try: + # A simplified approach to get the latest year's financial data + current_year = datetime.now().year + report_dates = [f"{current_year-1}1231"] # Get last year's report + latest_financials = await get_dm().get_financial_statements( + stock_code=ts_code, + report_dates=report_dates + ) + if latest_financials: + financial_data = {"series": latest_financials[0]} + except Exception as e: + logger.warning(f"[API] Failed to get financial data: {e}") + financial_data = None else: company_name = ts_code except Exception as e: @@ -755,3 +632,130 @@ async def generate_analysis( success=result.get("success", False), error=result.get("error") ) + + +@router.get("/china/{ts_code}/analysis/{analysis_type}/stream") +async def stream_analysis( + ts_code: str, + analysis_type: str, + company_name: str = Query(None, description="Company name for better context"), +): + """ + Stream analysis content chunks for a given module using OpenAI-compatible streaming. + Plain text streaming (text/plain; utf-8). Dependencies are resolved first (non-stream), + then the target module content is streamed. 
+ """ + import logging + logger = logging.getLogger(__name__) + + logger.info(f"[API] Streaming analysis requested for {ts_code}, type: {analysis_type}") + + # Load config + base_cfg = _load_json(BASE_CONFIG_PATH) + llm_provider = base_cfg.get("llm", {}).get("provider", "gemini") + llm_config = base_cfg.get("llm", {}).get(llm_provider, {}) + + api_key = llm_config.get("api_key") + base_url = llm_config.get("base_url") + + if not api_key: + logger.error(f"[API] API key for {llm_provider} not configured") + raise HTTPException(status_code=500, detail=f"API key for {llm_provider} not configured.") + + # Get analysis configuration + analysis_cfg = get_analysis_config(analysis_type) + if not analysis_cfg: + raise HTTPException(status_code=404, detail=f"Analysis type '{analysis_type}' not found in configuration") + + model = analysis_cfg.get("model", "gemini-2.5-flash") + prompt_template = analysis_cfg.get("prompt_template", "") + if not prompt_template: + raise HTTPException(status_code=500, detail=f"Prompt template not found for analysis type '{analysis_type}'") + + # Get company name from ts_code if not provided; we don't need full financials here + financial_data = None + if not company_name: + try: + basic_data = await get_dm().get_stock_basic(stock_code=ts_code) + if basic_data: + company_name = basic_data.get("name", ts_code) + else: + company_name = ts_code + except Exception: + company_name = ts_code + + # Resolve dependency context (non-streaming) + context = {} + try: + dependencies = analysis_cfg.get("dependencies", []) or [] + if dependencies: + analysis_config_full = load_analysis_config() + modules_config = analysis_config_full.get("analysis_modules", {}) + + all_required = set() + def collect_all(mod_name: str): + for dep in modules_config.get(mod_name, {}).get("dependencies", []) or []: + if dep not in all_required: + all_required.add(dep) + collect_all(dep) + for dep in dependencies: + all_required.add(dep) + collect_all(dep) + + graph = {name: [d for d in (modules_config.get(name, {}).get("dependencies", []) or []) if d in all_required] for name in all_required} + in_degree = {u: 0 for u in graph} + for u, deps in graph.items(): + for v in deps: + in_degree[v] += 1 + queue = [u for u, deg in in_degree.items() if deg == 0] + order = [] + while queue: + u = queue.pop(0) + order.append(u) + for v in graph.get(u, []): + in_degree[v] -= 1 + if in_degree[v] == 0: + queue.append(v) + if len(order) != len(graph): + order = list(all_required) + + completed = {} + for mod in order: + cfg = modules_config.get(mod, {}) + dep_ctx = {d: completed.get(d, "") for d in (cfg.get("dependencies", []) or [])} + dep_client = AnalysisClient(api_key=api_key, base_url=base_url, model=cfg.get("model", model)) + dep_result = await dep_client.generate_analysis( + analysis_type=mod, + company_name=company_name, + ts_code=ts_code, + prompt_template=cfg.get("prompt_template", ""), + financial_data=financial_data, + context=dep_ctx, + ) + completed[mod] = dep_result.get("content", "") if dep_result.get("success") else "" + context = {dep: completed.get(dep, "") for dep in dependencies} + except Exception: + context = {} + + client = AnalysisClient(api_key=api_key, base_url=base_url, model=model) + + async def streamer(): + # Optional header line to help client-side UI + header = f"# {analysis_cfg.get('name', analysis_type)}\n\n" + yield header + async for chunk in client.generate_analysis_stream( + analysis_type=analysis_type, + company_name=company_name, + ts_code=ts_code, + prompt_template=prompt_template, 
+ financial_data=financial_data, + context=context, + ): + yield chunk + + headers = { + # 禁止中间层缓冲,确保尽快把分块推送给客户端 + "Cache-Control": "no-cache, no-transform", + "X-Accel-Buffering": "no", + } + return StreamingResponse(streamer(), media_type="text/plain; charset=utf-8", headers=headers) diff --git a/backend/app/services/analysis_client.py b/backend/app/services/analysis_client.py index dca85a7..6f26657 100644 --- a/backend/app/services/analysis_client.py +++ b/backend/app/services/analysis_client.py @@ -14,7 +14,8 @@ class AnalysisClient: def __init__(self, api_key: str, base_url: str, model: str): """Initialize OpenAI client with API key, base URL, and model""" - self.client = openai.AsyncOpenAI(api_key=api_key, base_url=base_url) + # Increase client timeout to allow long-running analysis (5 minutes) + self.client = openai.AsyncOpenAI(api_key=api_key, base_url=base_url, timeout=300.0) self.model_name = model async def generate_analysis( @@ -56,6 +57,7 @@ class AnalysisClient: response = await self.client.chat.completions.create( model=self.model_name, messages=[{"role": "user", "content": prompt}], + timeout=300.0, ) content = response.choices[0].message.content if response.choices else "" @@ -130,6 +132,51 @@ class AnalysisClient: return prompt + async def generate_analysis_stream( + self, + analysis_type: str, + company_name: str, + ts_code: str, + prompt_template: str, + financial_data: Optional[Dict] = None, + context: Optional[Dict] = None + ): + """Yield analysis content chunks using OpenAI-compatible streaming API. + + Yields plain text chunks as they arrive. + """ + # Build prompt + prompt = self._build_prompt( + prompt_template, + company_name, + ts_code, + financial_data, + context, + ) + + try: + stream = await self.client.chat.completions.create( + model=self.model_name, + messages=[{"role": "user", "content": prompt}], + stream=True, + timeout=300.0, + ) + + # The SDK yields events with incremental deltas + async for event in stream: + try: + choice = event.choices[0] if getattr(event, "choices", None) else None + delta = getattr(choice, "delta", None) if choice is not None else None + content = getattr(delta, "content", None) if delta is not None else None + if content: + yield content + except Exception: + # Best-effort: ignore malformed chunks + continue + except Exception as e: + # Emit error message to the stream so the client can surface it + yield f"\n\n[错误] {type(e).__name__}: {str(e)}\n" + def load_analysis_config() -> Dict: """Load analysis configuration from JSON file""" diff --git a/backend/app/services/tushare_client.py b/backend/app/services/tushare_client.py deleted file mode 100644 index da901ad..0000000 --- a/backend/app/services/tushare_client.py +++ /dev/null @@ -1,52 +0,0 @@ -""" -Minimal async client for Tushare Pro API -""" -from typing import Any, Dict, List, Optional -import httpx - -TUSHARE_PRO_URL = "https://api.tushare.pro" - - -class TushareClient: - def __init__(self, token: str): - self.token = token - self._client = httpx.AsyncClient(timeout=30) - - async def query( - self, - api_name: str, - params: Optional[Dict[str, Any]] = None, - fields: Optional[str] = None, - ) -> List[Dict[str, Any]]: - payload = { - "api_name": api_name, - "token": self.token, - "params": params or {}, - } - # default larger page size if not provided - if "limit" not in payload["params"]: - payload["params"]["limit"] = 5000 - if fields: - payload["fields"] = fields - resp = await self._client.post(TUSHARE_PRO_URL, json=payload) - resp.raise_for_status() - data = 
resp.json() - if data.get("code") != 0: - err = data.get("msg") or "Tushare error" - raise RuntimeError(f"{api_name}: {err}") - fields_def = data.get("data", {}).get("fields", []) - items = data.get("data", {}).get("items", []) - rows: List[Dict[str, Any]] = [] - for it in items: - row = {fields_def[i]: it[i] for i in range(len(fields_def))} - rows.append(row) - return rows - - async def aclose(self): - await self._client.aclose() - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.aclose() diff --git a/backend/requirements.txt b/backend/requirements.txt index eab13e3..483cd49 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -7,3 +7,11 @@ aiosqlite==0.20.0 alembic==1.13.3 openai==1.37.0 asyncpg +greenlet==3.0.3 + +# Data Providers +tushare==1.4.1 +yfinance==0.2.37 +finnhub-python==2.4.20 +pandas==2.2.2 +PyYAML==6.0.1 diff --git a/config/analysis-config.json b/config/analysis-config.json index a80f5a8..dc886b2 100644 --- a/config/analysis-config.json +++ b/config/analysis-config.json @@ -2,49 +2,49 @@ "analysis_modules": { "company_profile": { "name": "公司简介", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "您是一位专业的证券市场分析师。请为公司 {company_name} (股票代码: {ts_code}) 生成一份详细且专业的公司介绍。开头不要自我介绍,直接开始正文。正文用MarkDown输出,尽量说明信息来源,用斜体显示信息来源。在生成内容时,请严格遵循以下要求并采用清晰、结构化的格式:\n\n1. **公司概览**:\n * 简要介绍公司的性质、核心业务领域及其在行业中的定位。\n * 提炼并阐述公司的核心价值理念。\n\n2. **主营业务**:\n * 详细描述公司主要的**产品或服务**。\n * **重要提示**:如果能获取到公司最新的官方**年报**或**财务报告**,请从中提取各主要产品/服务线的**收入金额**和其占公司总收入的**百分比**。请**明确标注数据来源**(例如:\"数据来源于XX年年度报告\")。\n * **严格禁止**编造或估算任何财务数据。若无法找到公开、准确的财务数据,请**不要**在这一点中提及具体金额或比例,仅描述业务内容。\n\n3. **发展历程**:\n * 以时间线或关键事件的形式,概述公司自成立以来的主要**里程碑事件**、重大发展阶段、战略转型或重要成就。\n\n4. **核心团队**:\n * 介绍公司**主要管理层和核心技术团队成员**。\n * 对于每位核心成员,提供其**职务、主要工作履历、教育背景**。\n * 如果公开可查,可补充其**出生年份**。\n\n5. **供应链**:\n * 描述公司的**主要原材料、部件或服务来源**。\n * 如果公开信息中包含,请列出**主要供应商名称**,并**明确其在总采购金额中的大致占比**。若无此数据,则仅描述采购模式。\n\n6. **主要客户及销售模式**:\n * 阐明公司的**销售模式**(例如:直销、经销、线上销售、代理等)。\n * 列出公司的**主要客户群体**或**代表性大客户**。\n * 如果公开信息中包含,请标明**主要客户(或前五大客户)的销售额占公司总销售额的比例**。若无此数据,则仅描述客户类型。\n\n7. 
**未来展望**:\n * 基于公司**公开的官方声明、管理层访谈或战略规划**,总结公司未来的发展方向、战略目标、重点项目或市场预期。请确保此部分内容有可靠的信息来源支持。" }, "fundamental_analysis": { "name": "基本面分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "# 角色\n你是一位专注于长期价值投资的顶级证券分析师,擅长从基本面出发,对公司进行深入、全面的分析。你的分析报告以客观、严谨、逻辑清晰、数据详实著称。\n# 任务\n为公司 {company_name} (股票代码: {ts_code}) 生成一份全面、专业、结构化的投资分析报告。\n# 输出要求\n直接开始:不要进行任何自我介绍或客套话,直接输出报告正文。\nMarkdown格式:使用清晰的多级Markdown标题(如 ## 和 ###)来组织报告结构。\n专业口吻:保持客观、中立、分析性的专业语调。\n信息缺失处理:如果某些信息在公开渠道无法获取,请明确指出“相关信息未公开披露”或类似说明。\n\n# 报告核心结构与分析要点\n一、 公司基本面分析 (Fundamental Analysis)\n1.1 护城河与核心竞争力\n公司通过何种独有优势(如品牌、技术、成本、网络效应、牌照等)获取超额利润?\n该护城河是在增强、维持还是在削弱?请提供论据。\n1.2 管理层与公司治理\n管理能力:管理层过往的战略决策和执行能力如何?是否有卓越的业界声誉?\n股东回报:管理层及大股东是否珍惜股权价值?(分析历史上的增持/减持行为、分红派息政策、是否存在损害小股东利益的体外资产等)\n激励与目标:公司的经营目标是长期主义还是短期化?管理层的激励机制(如股权激励、考核指标)是否与长期战略目标一致?\n1.3 企业文化与财务政策\n公司是否有独特且可观察到的企业文化?(例如:创新文化、成本控制文化等)\n公司的财务政策(如资本结构、现金流管理、投资策略)与同行业公司相比有何显著特点?是激进还是保守?\n1.4 发展历程与战略规划\n梳理公司发展史上的关键事件、重大业务转型或里程碑。\n公司是否有清晰的长期战略目标(未来5-10年)?计划成为一家什么样的企业?\n二、 业务与市场分析 (Business & Market Analysis)\n2.1 产品与客户价值\n公司为客户提供什么核心产品/服务?其核心价值点是什么?客户为何选择公司的产品而非竞争对手的?\n产品的更新迭代是颠覆性的还是渐进积累型的?分析产品历年的产量、价格及销量变化,并探讨其背后的驱动因素。\n2.2 市场需求与景气度\n客户所处行业的需求是趋势性的高增长,还是周期性波动?或是两者结合?当前处于何种阶段?\n目标客户群体的经营状况和现金流是否健康?\n2.3 议价能力与客户关系\n公司对下游客户的议价能力强弱如何?(结合应收账款周转天数、账龄结构、毛利率等数据进行佐证)\n公司与核心客户的关系是否稳定?客户对公司的评价如何(例如:客户忠诚度、满意度)?\n三、 竞争格局分析 (Competitive Landscape Analysis)\n3.1 竞争对手画像\n列出公司的主要竞争对手,并分析各自的优势与劣势。\n公司的竞争对手是在增多还是减少?行业进入壁垒是在增高还是降低?\n是否存在潜在的跨界竞争者?\n四、 供应链与外部关系 (Supply Chain & External Relations)\n4.1 供应链议价能力\n公司对上游供应商的议价能力如何?(结合应付账款周转天数、采购成本控制等数据进行佐证)\n核心供应商的经营是否稳定?供应链是否存在集中度过高的风险?\n4.2 金融机构关系与融资需求\n公司与金融机构的关系如何?融资渠道是否通畅?\n公司未来的发展是否依赖于大规模的债务或股权融资?\n五、 监管环境与政策风险 (Regulatory Environment & Policy Risks)\n公司所处行业是否存在重要的监管部门?主要的监管政策有哪些?\n监管政策是否稳定?未来可能发生哪些重大变化?对公司有何潜在影响?\n公司是否具备影响或适应监管政策变化的能力?" }, "bull_case": { "name": "看涨分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "dependencies": [], "prompt_template": "#### # 角色\n你是一位顶级的成长股投资分析师,拥有敏锐的洞察力,尤其擅长**挖掘市场尚未充分认识到的潜在价值**和**判断长期行业趋势**。你的任务是为目标公司构建一个令人信服的、由证据支持的看涨论述(Bull Case)。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份深入的看涨分析报告。报告的核心是论证该公司拥有被市场低估的隐藏资产、持续加深的护城河,并且其所处行业将迎来至少3年以上的景气周期。\n\n#### # 输出要求\n1. **直奔主题**:直接开始分析,无需引言。\n2. **Markdown格式**:使用清晰的标题结构来组织你的论点。\n3. **数据与来源**:所有关键论点都必须有数据、事实或合理的逻辑推演作为支撑。请用*斜体*注明信息来源(如:*来源:公司2023年投资者交流纪要* 或 *来源:中信证券行业研报*)。\n4. **聚焦看涨逻辑**:报告内容应完全围绕支撑看涨观点的论据展开,暂时忽略风险和负面因素。\n5. **前瞻性视角**:分析应侧重于未来3-5年的发展潜力,而不仅仅是回顾历史。\n6. 
**信息缺失处理**:如果某些推论需要的数据无法公开获取,可以基于现有信息进行合理的逻辑推测,并明确标注“(此为基于...的推测)”。\n\n---\n\n### # 看涨核心论证框架\n\n## 一、 深度挖掘:公司的隐藏资产与未被市场充分定价的价值\n\n### 1.1 资产负债表之外的价值 (Off-Balance Sheet Value)\n- **无形资产**:公司是否拥有未被充分计价的核心技术专利、软件著作权、特许经营权或强大的品牌价值?请量化或举例说明其潜在商业价值。\n- **数据资产**:公司是否积累了具有巨大潜在价值的用户或行业数据?这些数据未来可能的变现途径是什么?\n\n### 1.2 低估的实体或股权资产 (Undervalued Physical or Equity Assets)\n- **土地/物业重估**:公司持有的土地、房产等固定资产,其当前市场公允价值是否远超账面价值?\n- **子公司/投资价值**:公司旗下是否有快速增长但未被市场充分关注的子公司或有价值的长期股权投资?分析其独立估值的潜力。\n\n### 1.3 运营中的“隐形冠军” (Operational \"Hidden Champions\")\n- 公司是否存在独特的、难以复制的生产工艺、供应链管理能力或运营效率优势,而这些优势尚未完全体现在当前的利润率中?\n\n## 二、 护城河的加深:竞争优势的动态强化分析\n\n### 2.1 护城河的动态演变:是静态还是在拓宽?\n- 论证公司的核心护城河(例如:网络效应、转换成本、成本优势、技术壁垒)在未来几年将如何被**强化**而非削弱。请提供具体证据(如:研发投入的持续增长、客户续约率的提升、市场份额的扩大等)。\n\n### 2.2 技术与创新壁垒的领先优势\n- 公司的研发投入和创新产出,如何确保其在未来3-5年内保持对竞争对手的技术代差或领先地位?\n- 是否有即将商业化的“杀手级”新产品或新技术?\n\n### 2.3 品牌与客户粘性的正反馈循环\n- 公司的品牌价值或客户关系如何形成一个正反馈循环(即:强品牌带来高议价能力 -> 高利润投入研发/营销 -> 品牌更强)?\n- 客户为何难以转向竞争对手?分析其高昂的转换成本。\n\n## 三、 长期景气度:行业未来3年以上的持续增长动力\n\n### 3.1 长期需求驱动力(Demand-Side Drivers)\n- 驱动行业增长的核心动力是短期的周期性复苏,还是长期的结构性变迁(如:技术革命、消费升级、国产替代、政策驱动)?请深入论证。\n- 行业的市场渗透率是否仍有巨大提升空间?分析未来市场规模(TAM)的扩张潜力。\n\n### 3.2 供给侧格局优化(Supply-Side Dynamics)\n- 行业供给侧是否出现集中度提升、落后产能出清的趋势?这是否意味着龙头企业的定价权和盈利能力将持续增强?\n- 行业的进入壁垒是否在显著提高(如:技术、资金、资质壁垒),从而限制新竞争者的涌入?\n\n### 3.3 关键催化剂(Key Catalysts)\n- 未来1-2年内,是否存在可以显著提升公司估值或盈利的潜在催化剂事件(如:新产品发布、重要政策落地、海外市场突破等)?" }, "bear_case": { "name": "看跌分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "dependencies": [], "prompt_template": "#### # 角色\n你是一位经验丰富的风险控制分析师和审慎的价值投资者,以“能看到别人看不到的风险”而闻名。你的核心任务是**进行压力测试**,识别出公司潜在的、可能导致价值毁灭的重大风险点,并评估其在最坏情况下的价值底线。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份审慎的看跌分析报告(Bear Case)。报告需要深入探讨可能侵蚀公司护城河的因素、被市场忽视的潜在风险、行业可能面临的逆风,并对公司的价值底线进行评估。\n\n#### # 输出要求\n1. **直奔主题**:直接开始风险分析,无需引言。\n2. **Markdown格式**:使用清晰的标题结构组织风险论点。\n3. **证据驱动**:所有风险点都必须基于事实、数据或严谨的逻辑推演。请用*斜体*注明信息来源(如:*来源:竞争对手2023年财报* 或 *来源:行业监管政策草案*)。\n4. **聚焦看跌逻辑**:报告应完全围绕看跌观点展开,旨在识别和放大潜在的负面因素。\n5. **底线思维**:分析的核心是评估“事情最坏能到什么程度”,并判断公司的安全边际。\n6. 
**信息缺失处理**:对于难以量化的风险(如管理层风险),进行定性分析和逻辑阐述。\n\n---\n\n### # 看跌核心论证框架\n\n## 一、 护城河的侵蚀:竞争优势的脆弱性分析 (Moat Erosion: Vulnerability of Competitive Advantages)\n\n### 1.1 现有护城河的潜在威胁\n- 公司的核心护城河(技术、品牌、成本等)是否面临被颠覆的风险?(例如:新技术的出现、竞争对手的模仿或价格战)\n- 客户的转换成本是否真的足够高?是否存在某些因素(如行业标准化)可能降低客户的转换壁垒?\n\n### 1.2 竞争格局的恶化\n- 是否有新的、强大的“跨界”竞争者进入市场?\n- 行业是否从“蓝海”变为“红海”?分析导致竞争加剧的因素(如:产能过剩、产品同质化)。\n- 竞争对手的哪些战略举动可能对公司构成致命打击?\n\n## 二、 隐藏的负债与风险:资产负债表之外的“地雷” (Hidden Liabilities & Risks: Off-Balance Sheet \"Mines\")\n\n### 2.1 潜在的财务风险\n- 公司是否存在大量的或有负债、对外担保或未入表的债务?\n- 公司的现金流健康状况是否脆弱?分析其经营现金流能否覆盖资本开支和债务利息,尤其是在收入下滑的情况下。\n- 应收账款或存货是否存在潜在的暴雷风险?(分析其账龄、周转率和减值计提的充分性)\n\n### 2.2 运营与管理风险\n- 公司是否对单一供应商、单一客户或单一市场存在过度依赖?\n- 公司是否存在“关键人物风险”?创始团队或核心技术人员的离开会对公司造成多大影响?\n- 公司的企业文化或治理结构是否存在可能导致重大决策失误的缺陷?\n\n## 三、 行业逆风与最坏情况分析 (Industry Headwinds & Worst-Case Scenario)\n\n### 3.1 行业天花板与需求逆转\n- 行业渗透率是否已接近饱和?未来的增长空间是否被高估?\n- 驱动行业增长的核心因素是否可持续?是否存在可能导致需求突然逆转的黑天鹅事件(如:政策突变、技术路线改变、消费者偏好转移)?\n\n### 3.2 价值链上的压力传导\n- 上游供应商的议价能力是否在增强,从而挤压公司利润空间?\n- 下游客户的需求是否在萎缩,或者客户的财务状况是否在恶化?\n\n### 3.3 最坏情况压力测试 (Worst-Case Stress Test)\n- **情景假设**:假设行业需求下滑30%,或主要竞争对手发起价格战,公司的收入、利润和现金流会受到多大冲击?\n- **破产风险评估**:在这种极端情况下,公司是否有足够的现金储备和融资能力来度过危机?公司的生存底线在哪里?\n\n### 3.4 价值底线评估:清算价值分析 (Bottom-Line Valuation: Liquidation Value Analysis)\n- **核心假设**:在公司被迫停止经营并清算的极端情况下,其资产的真实变现价值是多少?\n- **资产逐项折价**:请对资产负债表中的主要科目进行折价估算。例如:\n - *现金及等价物*:按100%计算。\n - *应收账款*:根据账龄和客户质量,估计一个合理的回收率(如50%-80%)。\n - *存货*:根据存货类型(原材料、产成品)和市场状况,估计一个变现折扣(如30%-70%)。\n - *固定资产(厂房、设备)*:估计其二手市场的变现价值,通常远低于账面净值。\n - *无形资产/商誉*:大部分在清算时价值归零。\n- **负债计算**:公司的总负债(包括所有表内及表外负债)需要被优先偿还。\n- **清算价值估算**:计算**(折价后的总资产 - 总负债)/ 总股本**,得出每股清算价值。这是公司价值的绝对底线。\n\n## 四、 估值陷阱分析 (Valuation Trap Analysis)\n\n### 4.1 增长预期的证伪\n- 当前的高估值是否隐含了过于乐观的增长预期?论证这些预期为何可能无法实现。\n- 市场是否忽略了公司盈利能力的周期性,而将其误判为长期成长性?\n\n### 4.2 资产质量重估\n- 公司的资产(尤其是商誉、无形资产)是否存在大幅减值的风险?\n- 公司的真实盈利能力(扣除非经常性损益后)是否低于报表利润?\n" }, "market_analysis": { "name": "市场分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "#### # 角色\n你是一位顶级的市场策略分析师,精通行为金融学,对市场情绪和投资者心理有深刻的洞察。你擅长从海量的新闻、研报和市场数据中,提炼出当前市场对特定公司的核心看法、主要分歧点,并预判可能导致情绪反转的关键驱动因素。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份当前的市场情绪分析报告。报告应聚焦于解读市场参与者当下的想法,而不是对公司基本面进行独立研究。\n\n#### # 输出要求\n1. **基于近期信息**:分析必须基于**最近1-3个月**的公开新闻、分析师评论、社交媒体讨论和市场数据。\n2. **引用新闻来源**:在提到具体事件或观点时,必须用*斜体*注明新闻或信息来源。\n3. **客观呈现分歧**:清晰、中立地展示市场上多空双方的观点,而不是偏向任何一方。\n4. **聚焦“预期差”**:分析的核心是找出市场预期与公司现实之间可能存在的差距。\n5. **Markdown格式**:使用清晰的标题结构组织报告。\n\n---\n\n### # 市场情绪分析框架\n\n## 一、 当前市场主流叙事与估值定位 (Current Market Narrative & Valuation Positioning)\n\n### 1.1 市场的主流故事线是什么?\n- 综合近期(1-3个月内)的新闻报道和券商研报,当前市场在为这家公司讲述一个什么样的“故事”?是“困境反转”、“AI赋能”、“周期复苏”还是“增长放缓”?\n- 这个主流故事线是在近期被强化了,还是开始出现动摇?\n\n### 1.2 当前估值反映了什么预期?\n- 公司当前的估值水平(如市盈率P/E、市净率P/B)在历史和行业中处于什么位置(高位、中位、低位)?\n- 这个估值水平背后,市场“计价”了什么样的增长率、利润率或成功预期?*(例如:市场普遍预期其新业务明年将贡献30%的收入增长)*\n\n## 二、 情绪分歧点:多空双方的核心博弈 (Points of Disagreement: The Core Bull vs.
Bear Debate)\n\n### 2.1 关键分歧一:[例如:新产品的市场前景]\n- **看多者认为**:[陈述看多方的核心理由,并引用支持性新闻或数据]\n- **看空者认为**:[陈述看空方的核心理由,并引用支持性新闻或数据]\n\n### 2.2 关键分歧二:[例如:监管政策的影响]\n- **看多者认为**:[陈述看多方的核心理由,并引用支持性新闻或数据]\n- **看空者认为**:[陈述看空方的核心理由,并引用支持性新闻或数据]\n\n### 2.3 市场资金的态度\n- 近期是否有知名的机构投资者在增持或减持?\n- 股票的卖空比例是否有显著变化?这反映了什么情绪?\n\n## 三、 情绪变化的潜在驱动力 (Potential Drivers of Sentiment Change)\n\n### 3.1 近期(未来1-3个月)的关键催化剂\n- 列出未来短期内可能打破当前市场情绪平衡的关键事件。(例如:即将发布的财报、行业重要会议、新产品发布会、重要的宏观数据公布等)\n- 这些事件的结果将如何分别验证或证伪当前多/空双方的逻辑?\n\n### 3.2 识别“预期差”\n- 当前市场最可能“过度乐观”的点是什么?\n- 当前市场最可能“过度悲观”的点是什么?\n- 未来什么样的信息出现,会最大程度地修复这种预期差,并引发股价剧烈波动?\n" }, "news_analysis": { "name": "新闻分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "#### # 角色\n你是一位嗅觉极其敏锐的金融新闻分析师,专注于事件驱动投资策略。你擅长从看似孤立的新闻事件中,解读其深层含义,并精准预判其对公司股价可能造成的催化作用和潜在的拐点。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份股价催化剂与拐点预判报告。报告需要梳理近期相关新闻,并基于这些信息,识别出未来可能导致股价发生重大变化的正面及负面催化剂。\n\n#### # 输出要求\n1. **聚焦近期新闻**:分析应主要基于**最近1-2个月**的公司公告、行业新闻、政策文件及权威媒体报道。\n2. **明确时间线**:尽可能为潜在的催化剂事件标注一个预期的时间窗口(例如:“预计在Q4财报发布时”、“未来一个月内”)。\n3. **量化影响**:对于每个催化剂,不仅要定性判断(利好/利空),还要尝试分析其可能的影响级别(重大/中等/轻微)。\n4. **提供观察信号**:为每个预判的拐点,提供需要密切观察的关键信号或数据验证点。\n5. **Markdown格式**:使用清晰的标题结构。\n6. **引用来源**:关键信息需用*斜体*注明来源。\n\n---\n\n### # 股价催化剂与拐点分析框架\n\n## 一、 近期关键新闻梳理与解读 (Recent Key News Flow & Interpretation)\n\n- **新闻事件1:[日期] [新闻标题]**\n - *来源:[例如:公司官网公告 / 彭博社]*\n - **事件概述**:[简要概括新闻内容]\n - **市场初步反应**:[事件发生后,股价和成交量有何变化?]\n - **深层解读**:[该新闻是孤立事件,还是某个趋势的延续?它暗示了公司基本面的何种变化?]\n- **新闻事件2:[日期] [新闻标题]**\n - ... (以此类推)\n\n## 二、 正面催化剂预判 (Potential Positive Catalysts)\n\n### 2.1 确定性较高的催化剂 (High-Probability Catalysts)\n- **催化剂名称**:[例如:新一代产品发布]\n- **预期时间窗口**:[例如:预计在下个月的行业大会上]\n- **触发逻辑**:[为什么这件事会成为股价的正面驱动力?它会如何改善市场预期?]\n- **需观察的信号**:[需要看到什么具体信息(如产品性能参数、预订单数量)才能确认催化剂的有效性?]\n\n### 2.2 潜在的“黑天鹅”式利好 (Potential \"Black Swan\" Positives)\n- **催化剂名称**:[例如:意外获得海外市场准入 / 竞争对手出现重大失误]\n- **触发逻辑**:[描述这种小概率但影响巨大的利好事件及其可能性]\n- **需观察的信号**:[哪些先行指标或行业动态可能预示着这种事件的发生?]\n\n## 三、 负面催化剂预判 (Potential Negative Catalysts)\n\n### 3.1 确定性较高的风险 (High-Probability Risks)\n- **催化剂名称**:[例如:关键专利到期 / 主要客户合同续约谈判]\n- **预期时间窗口**:[例如:本季度末]\n- **触发逻辑**:[为什么这件事可能对股价造成负面冲击?]\n- **需观察的信号**:[需要关注哪些数据或公告来判断风险是否会兑现?]\n\n### 3.2 潜在的“黑天鹅”式风险 (Potential \"Black Swan\" Risks)\n- **催化剂名称**:[例如:突发性的行业监管收紧 / 供应链“断链”风险]\n- **触发逻辑**:[描述这种小概率但影响巨大的风险事件]\n- **需观察的信号**:[哪些蛛丝马迹可能预示着风险的临近?]\n\n## 四、 综合预判:下一个股价拐点 (Synthesis: The Next Inflection Point)\n\n- **核心博弈点**:综合以上分析,当前市场最关注、最可能率先发生的多空催化剂是什么?\n- **拐点预测**:基于当前信息,下一个可能改变股价趋势的关键时间点或事件最有可能是什么?\n- **关键验证指标**:在那个拐点到来之前,我们应该把注意力集中在哪个/哪些最关键的数据或信息上?\n" }, "trading_analysis": { "name": "交易分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "#### # 角色\n你是一位经验丰富的专业交易员,擅长将技术分析、市场赔率计算与基本面催化剂结合起来,制定高胜率的交易策略。你的决策核心是评估“风险回报比”,并寻找“基本面和资金面”可能形成共振(双击)的交易机会。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份可执行的交易分析报告。报告需要深入分析当前股价走势,评估潜在的上涨空间与风险,并判断其是否具备形成“戴维斯双击”式上涨的潜力。\n\n#### # 输出要求\n1. **图表导向**:分析应基于对价格图表(K线)、成交量和关键技术指标(如均线、MACD、RSI)的解读。\n2. **量化赔率**:明确计算并展示风险回报比(赔率),作为是否值得参与交易的核心依据。\n3. **明确信号**:给出清晰、无歧义的入场、止损和止盈信号。\n4. **客观中立**:只基于当前的市场数据和图表信号进行分析,避免主观臆测。\n5. 
**Markdown格式**:使用清晰的标题结构。\n\n---\n\n### # 交易策略分析框架\n\n## 一、 当前价格走势与结构分析 (Current Price Action & Structure Analysis)\n\n### 1.1 趋势与动能\n- **当前趋势**:股价目前处于明确的上升、下降还是盘整趋势中?(*参考:关键均线系统,如MA20, MA60, MA120的排列状态*)\n- **关键水平**:当前最重要的支撑位和阻力位分别在哪里?这些是历史高低点、均线位置还是成交密集区?\n- **量价关系**:近期的成交量与价格波动是否匹配?是否存在“价升量增”的健康上涨或“价跌量增”的恐慌抛售?\n\n### 1.2 图表形态\n- 近期是否形成了关键的K线形态?(例如:突破性阳线、反转信号)\n- 是否存在经典的图表形态?(例如:头肩底、W底、收敛三角形、箱体震荡)\n\n## 二、 市场体量与赔率计算 (Market Capacity & Risk/Reward Calculation)\n\n### 2.1 上涨空间评估 (Upside Potential)\n- 如果向上突破关键阻力位,下一个或几个现实的**目标价位**在哪里?(*参考:前期高点、斐波那契扩展位、形态测量目标*)\n- **潜在回报率**:从当前价格到主要目标价位的潜在上涨百分比是多少?\n\n### 2.2 风险评估与止损设置 (Downside Risk & Stop-Loss)\n- 如果交易逻辑被证伪,一个清晰、有效的**止损价位**应该设在哪里?(*参考:关键支撑位下方、上升趋势线下方*)\n- **潜在风险率**:从当前价格到止损价位的潜在下跌百分比是多少?\n\n### 2.3 赔率分析 (Risk/Reward Ratio)\n- 计算**风险回报比**(= 潜在回报率 / 潜在风险率)。这个比率是否具有吸引力?(*专业交易者通常要求至少大于 2:1 或 3:1*)\n- **市场体量**:该股的日均成交额是否足够大,能够容纳计划中的资金进出而不会造成显著的冲击成本?\n\n## 三、 增长路径:“双击”可能性评估 (Growth Path: \"Dual-Click\" Potential)\n\n### 3.1 基本面驱动力 (Fundamental Momentum)\n- 近期是否有或将要有**基本面催化剂**来支撑股价上涨?(*参考《股价催化剂分析》的结论,如:超预期的财报、新产品成功、行业政策利好*)\n- 这个基本面利好是能提供“一次性”的脉冲,还是能开启一个“持续性”的盈利增长周期?\n\n### 3.2 资金面驱动力 (Capital Momentum)\n- 是否有证据表明**增量资金**正在流入?(*参考:成交量的持续放大、机构投资者的增持报告、龙虎榜数据*)\n- 该股所属的板块或赛道,当前是否受到市场主流资金的青睐?\n\n### 3.3 “双击”可能性综合评估\n- 综合来看,公司出现“**业绩超预期(基本面)+ 估值提升(资金面)**”双击局面的可能性有多大?\n- 触发“双击”的关键信号可能是什么?(例如:在发布亮眼财报后,股价以放量涨停的方式突破关键阻力位)\n\n## 四、 交易计划总结 (Actionable Trading Plan)\n\n- **入场信号**:[具体的入场条件。例如:日线收盘价站上 {阻力位A} 并且成交量放大至 {数值X} 以上]\n- **止损策略**:[具体的止损条件。例如:日线收盘价跌破 {支撑位B}]\n- **止盈策略**:[具体的目标位和操作。例如:在 {目标位C} 止盈50%,剩余仓位跟踪止盈]\n- **仓位管理**:[基于赔率和确定性,建议的初始仓位是多少?]\n" }, "insider_institutional": { "name": "内部人与机构动向分析", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "#### # 角色\n你是一位专注于追踪“聪明钱”动向的顶级数据分析师。你对解读上市公司内部人(高管、大股东)的交易行为和机构投资者的持仓变化具有丰富的经验,能够从纷繁的数据中识别出预示未来股价走向的关键信号。\n\n#### # 任务\n为公司 **{company_name}** (股票代码: **{ts_code}**) 生成一份关于内部人与机构投资者动向的深度分析报告。报告需覆盖**最近6-12个月**的数据,并解读这些“聪明钱”的行为可能暗示的公司前景。\n\n#### # 输出要求\n1. **数据驱动**:分析必须基于公开的、可验证的数据(如交易所披露的内部人交易记录、基金公司的持仓报告如13F文件等)。\n2. **聚焦近期**:重点分析最近6-12个月的动向,以捕捉最新的趋势变化。\n3. **深度解读,而非罗列**:不仅要呈现数据,更要深入分析交易行为背后的动机。例如,区分主动的公开市场增持与被动的股权激励,分析机构的“新进”与“清仓”。\n4. **结合股价**:将内部人和机构的动向与同期的股价走势相结合,分析是否存在“低位吸筹”或“高位派发”的迹象。\n5. **Markdown格式**:使用清晰的标题结构。\n6. 
**引用来源**:*在分析时需注明数据来源类型,如:来源:Q3季度机构持仓报告*。\n\n---\n\n### # 内部人与机构动向分析框架\n\n## 一、 内部人动向分析 (Insider Activity Analysis)\n\n### 1.1 核心高管交易 (Key Executive Transactions)\n- **公开市场买卖**:近6-12个月,公司的核心高管(CEO, CFO等)是否有在公开市场**主动买入**或**卖出**自家股票?\n- **交易动机解读**:\n - **买入**:买入的金额、次数以及当时股价所处的位置?(*通常,高管在股价下跌后主动增持,被视为强烈的看多信号*)\n - **卖出**:是出于个人资金需求(如纳税)的一次性小额卖出,还是持续、大量的减持?是否在股价历史高位附近减持?\n- **期权行权**:高管行使期权后,是选择继续持有股票,还是立即在市场卖出?\n\n### 1.2 大股东与董事会成员动向 (Major Shareholder & Director Activity)\n- 持股5%以上的大股东或董事会成员,近期的整体趋势是增持还是减持?\n- 是否存在关键股东(如创始人、战略投资者)的持股比例发生重大变化?\n\n### 1.3 内部人持股的总体趋势\n- 综合来看,内部人近半年的行为释放了什么样的集体信号?是信心增强、信心减弱,还是无明显趋势?\n\n## 二、 机构投资者动向分析 (Institutional Investor Activity Analysis)\n\n### 2.1 机构持股的总体变化\n- **持股比例**:机构投资者的总持股占流通股的比例,在最近几个季度是上升还是下降?\n- **股东数量**:持有该公司股票的机构总数是在增加还是减少?(*数量增加通常意味着市场关注度的提升*)\n\n### 2.2 顶级机构的进出 (Top-Tier Institution Moves)\n- **十大机构股东**:当前最大的机构股东有哪些?在最近一个报告期,它们是“增持”、“减持”、“新进”还是“清仓”?\n- **“聪明钱”的踪迹**:是否有以长期价值投资著称的知名基金(如高瓴、景林、Fidelity等)新进入了股东名单,或者大幅增持?\n- 反之,是否有顶级机构在清仓式卖出?\n\n### 2.3 机构观点的“一致性”\n- 从机构的整体行为来看,市场主流机构对该公司的看法是趋于一致(大家都在买或都在卖),还是存在巨大分歧?\n\n## 三、 综合研判:“聪明钱”的信号 (Synthesized Verdict: The \"Smart Money\" Signal)\n\n### 3.1 信号的一致性与背离\n- 内部人和机构投资者的行动方向是否一致?(*例如:内部人增持的同时,顶级机构也在建仓,这是一个极强的看多信号*)\n- “聪明钱”的动向是否与当前市场情绪或股价走势相背离?(*例如:在散户普遍悲观、股价下跌时,内部人和机构却在持续买入*)\n\n### 3.2 最终结论\n- 综合来看,在未来3-6个月,来自“聪明钱”的资金流向是可能成为股价的**顺风**(Tailwind)还是**逆风**(Headwind)?\n" }, "final_conclusion": { "name": "最终结论", - "model": "gemini-2.5-flash", + "model": "qwen-flash", "prompt_template": "#### # 角色\n你是一位顶级的基金公司首席投资官(CIO),你的工作不是进行初步研究,而是听取旗下所有分析师(基本面、宏观、技术、新闻、数据等)的报告后,做出最终的、高质量的投资决策。你必须能够穿透信息的迷雾,抓住主要矛盾,并给出明确的行动指令。\n\n#### # 任务\n基于以下七个维度的分析报告(由你的团队提供),为公司 **{company_name}** (股票代码: **{ts_code}**) 形成一份最终的投资决策备忘录。\n\n- **基本面分析**: `{fundamental_analysis}`\n- **看涨分析**: `{bull_case}`\n- **看跌分析**: `{bear_case}`\n- **市场情绪分析**: `{market_analysis}`\n- **新闻催化剂分析**: `{news_analysis}`\n- **交易策略分析**: `{trading_analysis}`\n- **内部人与机构动向**: `{insider_institutional}`\n\n#### # 输出要求\n1. **全局视角**:必须将所有输入信息融会贯通,形成一个逻辑自洽的、立体的投资论点。\n2. **抓住核心**:聚焦于识别当前局面的“核心矛盾”和最大的“预期差”。\n3. **决策导向**:结论必须是明确的、可执行的,并包含对“时机”和“价值”的量化评估。\n4. **精炼语言**:使用专业、果断、直击要害的语言。\n5. 
**Markdown格式**:使用清晰的标题结构。\n\n---\n\n### # 最终投资决策备忘录\n\n## 一、 核心矛盾与预期差 (Core Contradiction & Expectation Gap)\n\n- **当前的核心矛盾是什么?** 综合所有分析,当前多空双方争论的、最核心的、最关键的一个问题是什么?(例如:是“高估值下的成长故事”与“宏观逆风下的业绩担忧”之间的矛盾?还是“革命性产品”与“商业化落地不确定性”之间的矛盾?)\n- **最大的预期差在哪里?** 我们认为市场在哪一个关键点上可能犯了最大的错误?是我们比市场更乐观,还是更悲观?具体体现在哪个方面?\n\n## 二、 拐点的临近度与关键信号 (Proximity to Inflection Point & Key Signals)\n\n- **拐点是否临近?** 能够解决上述“核心矛盾”的关键催化剂事件,是否即将发生?(参考新闻和催化剂分析)\n- **我们需要验证什么?** 在拐点到来之前,我们需要密切跟踪和验证的、最关键的1-2个数据或信号是什么?(例如:是新产品的预订单数量,还是下一个季度的毛利率指引?)\n\n## 三、 综合投资论点 (Synthesized Investment Thesis)\n\n- **质量与价值(基本面 & 看跌风险)**:这家公司的“质量”如何?它的护城河是否足够深厚,能够在最坏的情况下提供足够的安全边际(清算价值)?\n- **成长与赔率(看涨 & 交易分析)**:如果看涨逻辑兑现,潜在的回报空间有多大?当前的交易结构是否提供了有吸引力的风险回报比?\n- **情绪与资金(市场情绪 & 聪明钱)**:当前的市场情绪是助力还是阻力?“聪明钱”的流向是在支持还是反对我们的判断?\n- **时机与催化剂(新闻分析)**:现在是合适的扣动扳机的时间点吗?还是需要等待某个关键催化剂的出现?\n\n## 四、 最终决策与评级 (Final Decision & Rating)\n\n- **投资结论**:[明确给出:**买入 / 增持 / 观望 / 减持 / 卖出**]\n- **核心投资逻辑**:[用一句话总结本次决策的核心理由]\n\n- **值得参与度评分**:**[请打分, 1-10分]**\n - *(评分标准:1-3分=机会不佳;4-6分=值得观察;7-8分=良好机会,建议配置;9-10分=极佳机会,应重点配置)*\n\n- **关注时间维度**:**[请选择:紧急 / 中期 / 长期]**\n - *(评级标准:**紧急**=关键拐点预计在1个月内;**中期**=关键拐点预计在1-6个月;**长期**=需要持续跟踪6个月以上)*\n", "dependencies": [ "fundamental_analysis", diff --git a/config/data_sources.yaml b/config/data_sources.yaml new file mode 100644 index 0000000..80506f5 --- /dev/null +++ b/config/data_sources.yaml @@ -0,0 +1,37 @@ +# Configuration for data sources used by the DataManager + +# Defines the available data sources and their specific configurations. +# 'api_key_env' specifies the environment variable that should hold the API key/token. +data_sources: + tushare: + api_key_env: TUSHARE_TOKEN + description: "Primary data source for China market (A-shares)." + yfinance: + api_key_env: null # No API key required + description: "Good for global market data, especially US stocks." + finnhub: + api_key_env: FINNHUB_API_KEY + description: "Another comprehensive source for global stock data." + +# Defines the priority of data providers for each market. +# The DataManager will try them in order until data is successfully fetched. 
+markets: + CN: # China Market + priority: + - tushare + - yfinance # yfinance can be a fallback + US: # US Market + priority: + - yfinance + - finnhub + HK: # Hong Kong Market + priority: + - yfinance + - finnhub + JP: # Japan Market + priority: + - yfinance + DEFAULT: + priority: + - yfinance + - finnhub diff --git a/docs/user-guide.md b/docs/user-guide.md index 36b80ab..7ae8797 100644 --- a/docs/user-guide.md +++ b/docs/user-guide.md @@ -265,3 +265,6 @@ A: **最后更新**:2025年1月 + + + diff --git a/frontend/.gitignore b/frontend/.gitignore index 5ef6a52..f390d12 100644 --- a/frontend/.gitignore +++ b/frontend/.gitignore @@ -39,3 +39,5 @@ yarn-error.log* # typescript *.tsbuildinfo next-env.d.ts + +/src/generated/prisma diff --git a/frontend/next.config.mjs b/frontend/next.config.mjs index af90794..0dc09c1 100644 --- a/frontend/next.config.mjs +++ b/frontend/next.config.mjs @@ -11,7 +11,7 @@ const nextConfig = { }, // Increase server timeout for long-running AI requests experimental: { - proxyTimeout: 120000, // 120 seconds + proxyTimeout: 300000, // 300 seconds (5 minutes) }, async rewrites() { return [ diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 4c5fea9..b45a1ae 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,6 +8,7 @@ "name": "frontend", "version": "0.1.0", "dependencies": { + "@prisma/client": "^6.18.0", "@radix-ui/react-checkbox": "^1.3.3", "@radix-ui/react-navigation-menu": "^1.2.14", "@radix-ui/react-select": "^2.2.6", @@ -15,6 +16,7 @@ "@radix-ui/react-tabs": "^1.1.13", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "geist": "^1.5.1", "github-markdown-css": "^5.8.1", "lucide-react": "^0.545.0", "next": "15.5.5", @@ -35,6 +37,7 @@ "@types/react-dom": "^19", "eslint": "^9", "eslint-config-next": "15.5.5", + "prisma": "^6.18.0", "tailwindcss": "^4", "tw-animate-css": "^1.4.0", "typescript": "^5" @@ -1016,6 +1019,91 @@ "node": ">=12.4.0" } }, + "node_modules/@prisma/client": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/client/-/client-6.18.0.tgz", + "integrity": "sha512-jnL2I9gDnPnw4A+4h5SuNn8Gc+1mL1Z79U/3I9eE2gbxJG1oSA+62ByPW4xkeDgwE0fqMzzpAZ7IHxYnLZ4iQA==", + "hasInstallScript": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "peerDependencies": { + "prisma": "*", + "typescript": ">=5.1.0" + }, + "peerDependenciesMeta": { + "prisma": { + "optional": true + }, + "typescript": { + "optional": true + } + } + }, + "node_modules/@prisma/config": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/config/-/config-6.18.0.tgz", + "integrity": "sha512-rgFzspCpwsE+q3OF/xkp0fI2SJ3PfNe9LLMmuSVbAZ4nN66WfBiKqJKo/hLz3ysxiPQZf8h1SMf2ilqPMeWATQ==", + "devOptional": true, + "license": "Apache-2.0", + "dependencies": { + "c12": "3.1.0", + "deepmerge-ts": "7.1.5", + "effect": "3.18.4", + "empathic": "2.0.0" + } + }, + "node_modules/@prisma/debug": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-6.18.0.tgz", + "integrity": "sha512-PMVPMmxPj0ps1VY75DIrT430MoOyQx9hmm174k6cmLZpcI95rAPXOQ+pp8ANQkJtNyLVDxnxVJ0QLbrm/ViBcg==", + "devOptional": true, + "license": "Apache-2.0" + }, + "node_modules/@prisma/engines": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-6.18.0.tgz", + "integrity": "sha512-i5RzjGF/ex6AFgqEe2o1IW8iIxJGYVQJVRau13kHPYEL1Ck8Zvwuzamqed/1iIljs5C7L+Opiz5TzSsUebkriA==", + "devOptional": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + 
"@prisma/debug": "6.18.0", + "@prisma/engines-version": "6.18.0-8.34b5a692b7bd79939a9a2c3ef97d816e749cda2f", + "@prisma/fetch-engine": "6.18.0", + "@prisma/get-platform": "6.18.0" + } + }, + "node_modules/@prisma/engines-version": { + "version": "6.18.0-8.34b5a692b7bd79939a9a2c3ef97d816e749cda2f", + "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-6.18.0-8.34b5a692b7bd79939a9a2c3ef97d816e749cda2f.tgz", + "integrity": "sha512-T7Af4QsJQnSgWN1zBbX+Cha5t4qjHRxoeoWpK4JugJzG/ipmmDMY5S+O0N1ET6sCBNVkf6lz+Y+ZNO9+wFU8pQ==", + "devOptional": true, + "license": "Apache-2.0" + }, + "node_modules/@prisma/fetch-engine": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-6.18.0.tgz", + "integrity": "sha512-TdaBvTtBwP3IoqVYoGIYpD4mWlk0pJpjTJjir/xLeNWlwog7Sl3bD2J0jJ8+5+q/6RBg+acb9drsv5W6lqae7A==", + "devOptional": true, + "license": "Apache-2.0", + "dependencies": { + "@prisma/debug": "6.18.0", + "@prisma/engines-version": "6.18.0-8.34b5a692b7bd79939a9a2c3ef97d816e749cda2f", + "@prisma/get-platform": "6.18.0" + } + }, + "node_modules/@prisma/get-platform": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-6.18.0.tgz", + "integrity": "sha512-uXNJCJGhxTCXo2B25Ta91Rk1/Nmlqg9p7G9GKh8TPhxvAyXCvMNQoogj4JLEUy+3ku8g59cpyQIKFhqY2xO2bg==", + "devOptional": true, + "license": "Apache-2.0", + "dependencies": { + "@prisma/debug": "6.18.0" + } + }, "node_modules/@radix-ui/number": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", @@ -3079,6 +3167,35 @@ "node": ">=8" } }, + "node_modules/c12": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/c12/-/c12-3.1.0.tgz", + "integrity": "sha512-uWoS8OU1MEIsOv8p/5a82c3H31LsWVR5qiyXVfBNOzfffjUWtPnhAb4BYI2uG2HfGmZmFjCtui5XNWaps+iFuw==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "chokidar": "^4.0.3", + "confbox": "^0.2.2", + "defu": "^6.1.4", + "dotenv": "^16.6.1", + "exsolve": "^1.0.7", + "giget": "^2.0.0", + "jiti": "^2.4.2", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^1.0.0", + "pkg-types": "^2.2.0", + "rc9": "^2.1.2" + }, + "peerDependencies": { + "magicast": "^0.3.5" + }, + "peerDependenciesMeta": { + "magicast": { + "optional": true + } + } + }, "node_modules/call-bind": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", @@ -3226,6 +3343,22 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", @@ -3236,6 +3369,16 @@ "node": ">=18" } }, + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + } + }, "node_modules/class-variance-authority": { "version": "0.7.1", "resolved": 
"https://registry.npmjs.org/class-variance-authority/-/class-variance-authority-0.7.1.tgz", @@ -3300,6 +3443,23 @@ "dev": true, "license": "MIT" }, + "node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/consola": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, "node_modules/cross-spawn": { "version": "7.0.6", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", @@ -3546,6 +3706,16 @@ "dev": true, "license": "MIT" }, + "node_modules/deepmerge-ts": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/deepmerge-ts/-/deepmerge-ts-7.1.5.tgz", + "integrity": "sha512-HOJkrhaYsweh+W+e74Yn7YStZOilkoPb6fycpwNLKzSPtruFs48nYis0zy5yJz1+ktUhHxoRDJ27RQAWLIJVJw==", + "devOptional": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/define-data-property": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", @@ -3582,6 +3752,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "devOptional": true, + "license": "MIT" + }, "node_modules/dequal": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", @@ -3591,6 +3768,13 @@ "node": ">=6" } }, + "node_modules/destr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.5.tgz", + "integrity": "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==", + "devOptional": true, + "license": "MIT" + }, "node_modules/detect-libc": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", @@ -3633,6 +3817,19 @@ "node": ">=0.10.0" } }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "devOptional": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -3648,6 +3845,17 @@ "node": ">= 0.4" } }, + "node_modules/effect": { + "version": "3.18.4", + "resolved": "https://registry.npmjs.org/effect/-/effect-3.18.4.tgz", + "integrity": "sha512-b1LXQJLe9D11wfnOKAk3PKxuqYshQ0Heez+y5pnkd3jLj1yx9QhM72zZ9uUrOQyNvrs2GZZd/3maL0ZV18YuDA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "fast-check": "^3.23.1" + } + }, "node_modules/emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", @@ -3655,6 +3863,16 @@ "dev": true, "license": "MIT" }, + "node_modules/empathic": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, "node_modules/enhanced-resolve": { "version": "5.18.3", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", @@ -4311,12 +4529,42 @@ "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", "license": "MIT" }, + "node_modules/exsolve": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.7.tgz", + "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==", + "devOptional": true, + "license": "MIT" + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", "license": "MIT" }, + "node_modules/fast-check": { + "version": "3.23.2", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.23.2.tgz", + "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", + "devOptional": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT", + "dependencies": { + "pure-rand": "^6.1.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -4499,6 +4747,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/geist": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/geist/-/geist-1.5.1.tgz", + "integrity": "sha512-mAHZxIsL2o3ZITFaBVFBnwyDOw+zNLYum6A6nIjpzCGIO8QtC3V76XF2RnZTyLx1wlDTmMDy8jg3Ib52MIjGvQ==", + "license": "SIL OPEN FONT LICENSE", + "peerDependencies": { + "next": ">=13.2.0" + } + }, "node_modules/generator-function": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/generator-function/-/generator-function-2.0.1.tgz", @@ -4588,6 +4845,24 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, + "node_modules/giget": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/giget/-/giget-2.0.0.tgz", + "integrity": "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.6.0", + "pathe": "^2.0.3" + }, + "bin": { + "giget": "dist/cli.mjs" + } + }, "node_modules/github-markdown-css": { "version": "5.8.1", "resolved": "https://registry.npmjs.org/github-markdown-css/-/github-markdown-css-5.8.1.tgz", @@ -5385,7 +5660,7 @@ "version": "2.6.1", "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", - "dev": true, + "devOptional": true, "license": "MIT", "bin": { "jiti": "lib/jiti-cli.mjs" @@ -6871,6 +7146,33 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/node-fetch-native": { + "version": "1.6.7", + "resolved": 
"https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", + "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/nypm": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.6.2.tgz", + "integrity": "sha512-7eM+hpOtrKrBDCh7Ypu2lJ9Z7PNZBdi/8AT3AX8xoCj43BBVHD0hPSTEvMtkMpfs8FCqBGhxB+uToIQimA111g==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.2", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0", + "tinyexec": "^1.0.1" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, + "engines": { + "node": "^14.16.0 || >=16.10.0" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -6994,6 +7296,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/ohash": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", + "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", + "devOptional": true, + "license": "MIT" + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -7127,6 +7436,20 @@ "dev": true, "license": "MIT" }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "devOptional": true, + "license": "MIT" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -7146,6 +7469,18 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/pkg-types": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, "node_modules/possible-typed-array-names": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz", @@ -7195,6 +7530,32 @@ "node": ">= 0.8.0" } }, + "node_modules/prisma": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/prisma/-/prisma-6.18.0.tgz", + "integrity": "sha512-bXWy3vTk8mnRmT+SLyZBQoC2vtV9Z8u7OHvEu+aULYxwiop/CPiFZ+F56KsNRNf35jw+8wcu8pmLsjxpBxAO9g==", + "devOptional": true, + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "@prisma/config": "6.18.0", + "@prisma/engines": "6.18.0" + }, + "bin": { + "prisma": "build/index.js" + }, + "engines": { + "node": ">=18.18" + }, + "peerDependencies": { + "typescript": ">=5.1.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/prop-types": { "version": "15.8.1", "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ 
-7227,6 +7588,23 @@ "node": ">=6" } }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "devOptional": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -7248,6 +7626,17 @@ ], "license": "MIT" }, + "node_modules/rc9": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-2.1.2.tgz", + "integrity": "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "defu": "^6.1.4", + "destr": "^2.0.3" + } + }, "node_modules/react": { "version": "19.1.0", "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", @@ -7394,6 +7783,20 @@ } } }, + "node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/recharts": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/recharts/-/recharts-3.3.0.tgz", @@ -8217,6 +8620,13 @@ "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", "license": "MIT" }, + "node_modules/tinyexec": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", + "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", + "devOptional": true, + "license": "MIT" + }, "node_modules/tinyglobby": { "version": "0.2.15", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", @@ -8435,7 +8845,7 @@ "version": "5.9.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "dev": true, + "devOptional": true, "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", diff --git a/frontend/package.json b/frontend/package.json index 5560a36..71c500e 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -3,12 +3,13 @@ "version": "0.1.0", "private": true, "scripts": { - "dev": "next dev --turbopack", + "dev": "next dev -p 3001", "build": "next build", "start": "next start", "lint": "eslint" }, "dependencies": { + "@prisma/client": "^6.18.0", "@radix-ui/react-checkbox": "^1.3.3", "@radix-ui/react-navigation-menu": "^1.2.14", "@radix-ui/react-select": "^2.2.6", @@ -16,6 +17,7 @@ "@radix-ui/react-tabs": "^1.1.13", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", + "geist": "^1.5.1", "github-markdown-css": "^5.8.1", "lucide-react": "^0.545.0", "next": "15.5.5", @@ -36,6 +38,7 @@ "@types/react-dom": "^19", "eslint": "^9", "eslint-config-next": "15.5.5", + "prisma": "^6.18.0", "tailwindcss": "^4", "tw-animate-css": "^1.4.0", "typescript": "^5" diff --git 
a/frontend/prisma/migrations/migration_lock.toml b/frontend/prisma/migrations/migration_lock.toml new file mode 100644 index 0000000..044d57c --- /dev/null +++ b/frontend/prisma/migrations/migration_lock.toml @@ -0,0 +1,3 @@ +# Please do not edit this file manually +# It should be added in your version-control system (e.g., Git) +provider = "postgresql" diff --git a/frontend/prisma/schema.prisma b/frontend/prisma/schema.prisma new file mode 100644 index 0000000..dbeadc2 --- /dev/null +++ b/frontend/prisma/schema.prisma @@ -0,0 +1,19 @@ +// This is your Prisma schema file, +// learn more about it in the docs: https://pris.ly/d/prisma-schema + +generator client { + provider = "prisma-client-js" +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + shadowDatabaseUrl = env("PRISMA_MIGRATE_SHADOW_DATABASE_URL") +} + +model Report { + id String @id @default(uuid()) + symbol String + content Json + createdAt DateTime @default(now()) +} diff --git a/frontend/src/app/api/financials/[...slug]/route.ts b/frontend/src/app/api/financials/[...slug]/route.ts index 356c19f..4457f75 100644 --- a/frontend/src/app/api/financials/[...slug]/route.ts +++ b/frontend/src/app/api/financials/[...slug]/route.ts @@ -11,6 +11,14 @@ export async function GET( const path = slug.join('/'); const target = `${BACKEND_BASE}/financials/${path}${url.search}`; const resp = await fetch(target, { headers: { 'Content-Type': 'application/json' } }); - const text = await resp.text(); - return new Response(text, { status: resp.status, headers: { 'Content-Type': resp.headers.get('Content-Type') || 'application/json' } }); + // 透传后端响应(支持流式 body) + const headers = new Headers(); + // 复制关键头,减少代理层缓冲 + const contentType = resp.headers.get('content-type') || 'application/json; charset=utf-8'; + headers.set('content-type', contentType); + const cacheControl = resp.headers.get('cache-control'); + if (cacheControl) headers.set('cache-control', cacheControl); + const xAccelBuffering = resp.headers.get('x-accel-buffering'); + if (xAccelBuffering) headers.set('x-accel-buffering', xAccelBuffering); + return new Response(resp.body, { status: resp.status, headers }); } diff --git a/frontend/src/app/api/reports/[id]/route.ts b/frontend/src/app/api/reports/[id]/route.ts new file mode 100644 index 0000000..5e4ff6c --- /dev/null +++ b/frontend/src/app/api/reports/[id]/route.ts @@ -0,0 +1,29 @@ +import { NextRequest } from 'next/server' +import { prisma } from '../../../../lib/prisma' + +export async function GET( + req: NextRequest, + context: { params: Promise<{ id: string }> } +) { + // 优先从动态路由 params(Promise)获取,其次从 URL 最后一段兜底 + let id: string | undefined + try { + const { id: idFromParams } = await context.params + id = idFromParams + } catch { + // ignore + } + if (!id) { + id = new URL(req.url).pathname.split('/').pop() || undefined + } + + if (!id) { + return Response.json({ error: 'missing id' }, { status: 400 }) + } + + const report = await prisma.report.findUnique({ where: { id } }) + if (!report) { + return Response.json({ error: 'not found' }, { status: 404 }) + } + return Response.json(report) +} diff --git a/frontend/src/app/api/reports/route.ts b/frontend/src/app/api/reports/route.ts new file mode 100644 index 0000000..55c0d22 --- /dev/null +++ b/frontend/src/app/api/reports/route.ts @@ -0,0 +1,42 @@ +import { NextRequest } from 'next/server' +import { prisma } from '../../../lib/prisma' + +export async function GET(req: NextRequest) { + const url = new URL(req.url) + const limit = 
Number(url.searchParams.get('limit') || 50) + const offset = Number(url.searchParams.get('offset') || 0) + + const [items, total] = await Promise.all([ + prisma.report.findMany({ + orderBy: { createdAt: 'desc' }, + skip: offset, + take: Math.min(Math.max(limit, 1), 200) + }), + prisma.report.count() + ]) + + return Response.json({ items, total }) +} + +export async function POST(req: NextRequest) { + try { + const body = await req.json() + const symbol = String(body.symbol || '').trim() + const content = body.content + + if (!symbol) { + return Response.json({ error: 'symbol is required' }, { status: 400 }) + } + if (typeof content === 'undefined') { + return Response.json({ error: 'content is required' }, { status: 400 }) + } + + const created = await prisma.report.create({ + data: { symbol, content } + }) + + return Response.json(created, { status: 201 }) + } catch (e) { + return Response.json({ error: 'invalid json body' }, { status: 400 }) + } +} diff --git a/frontend/src/app/fonts/README.md b/frontend/src/app/fonts/README.md new file mode 100644 index 0000000..575f5ad --- /dev/null +++ b/frontend/src/app/fonts/README.md @@ -0,0 +1,16 @@ +将本地自托管字体放在此目录。 + +需要文件(建议): +- GeistVF.woff2 +- GeistMonoVF.woff2 + +来源建议: +- 若你已有字体授权,可从官方来源或内部制品库获取 WOFF2 变体文件。 + +放置后无需额外配置,`src/app/layout.tsx` 已使用 next/font/local 引用: +- ./fonts/GeistVF.woff2 -> --font-geist-sans +- ./fonts/GeistMonoVF.woff2 -> --font-geist-mono + +若暂时没有字体文件,页面会退回系统默认字体,不影响功能。 + + diff --git a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx index f2aa6d1..c682509 100644 --- a/frontend/src/app/layout.tsx +++ b/frontend/src/app/layout.tsx @@ -1,5 +1,6 @@ import type { Metadata } from "next"; -import { Geist, Geist_Mono } from "next/font/google"; +import { GeistSans } from 'geist/font/sans' +import { GeistMono } from 'geist/font/mono' import "./globals.css"; import { NavigationMenu, @@ -8,15 +9,9 @@ import { NavigationMenuList, } from "@/components/ui/navigation-menu"; -const geistSans = Geist({ - variable: "--font-geist-sans", - subsets: ["latin"], -}); - -const geistMono = Geist_Mono({ - variable: "--font-geist-mono", - subsets: ["latin"], -}); +// 官方 Geist 字体(npm 包) +const geistSans = GeistSans; +const geistMono = GeistMono; export const metadata: Metadata = { title: "Fundamental Analysis", @@ -40,7 +35,7 @@ export default function RootLayout({ 首页 - 报表 + 历史报告 文档 diff --git a/frontend/src/app/report/[symbol]/page.tsx b/frontend/src/app/report/[symbol]/page.tsx index c54b6ed..20ab870 100644 --- a/frontend/src/app/report/[symbol]/page.tsx +++ b/frontend/src/app/report/[symbol]/page.tsx @@ -44,9 +44,6 @@ export default function ReportPage() { // 分析类型列表(按顺序) const analysisTypes = useMemo(() => { if (!analysisConfig?.analysis_modules) return []; - // The order now comes from the backend's topological sort, - // but we can define a preferred order for display if needed. - // For now, let's just get the keys. return Object.keys(analysisConfig.analysis_modules); }, [analysisConfig]); @@ -94,6 +91,49 @@ export default function ReportPage() { error?: string; }>>([]); + const [saving, setSaving] = useState(false) + const [saveMsg, setSaveMsg] = useState(null) + + const saveReport = async () => { + try { + setSaving(true) + setSaveMsg(null) + const content = { + market, + normalizedSymbol: normalizedTsCode, + financialsMeta: financials?.meta || null, + // 同步保存财务数据(用于报告详情页展示) + financials: financials + ? 
{ + ts_code: financials.ts_code, + name: (financials as any).name, + series: financials.series, + meta: financials.meta, + } + : null, + analyses: Object.fromEntries( + Object.entries(analysisStates).map(([k, v]) => [k, { content: v.content, error: v.error, elapsed_ms: v.elapsed_ms, tokens: v.tokens }]) + ) + } + const resp = await fetch('/api/reports', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ symbol: normalizedTsCode, content }) + }) + if (!resp.ok) { + const t = await resp.json().catch(() => ({})) + throw new Error(t?.error || `HTTP ${resp.status}`) + } + const data = await resp.json() + setSaveMsg('保存成功') + return data + } catch (e) { + setSaveMsg(e instanceof Error ? e.message : '保存失败') + } finally { + setSaving(false) + } + } + const runFullAnalysis = async () => { if (!isChina || !financials || !analysisConfig?.analysis_modules || isAnalysisRunningRef.current) { return; @@ -131,7 +171,6 @@ export default function ReportPage() { fullAnalysisTriggeredRef.current = true; runFullAnalysis(); } - // eslint-disable-next-line react-hooks/exhaustive-deps }, [financials]); // 计算完成比例 @@ -157,7 +196,6 @@ export default function ReportPage() { const v = typeof ms === 'number' ? ms : 0; if (v >= 1000) { const s = v / 1000; - // 保留两位小数 return `${s.toFixed(2)} s`; } return `${v} ms`; @@ -185,7 +223,6 @@ export default function ReportPage() { return map; }, [financialConfig]); - // 创建 tushareParam 到 分组 名称的映射(用于数值缩放判断) const metricGroupMap = useMemo(() => { if (!financialConfig?.api_groups) return {} as Record; const map: Record = {}; @@ -199,19 +236,16 @@ export default function ReportPage() { return map; }, [financialConfig]); - // 数字格式化(千分位,保留两位小数) const numberFormatter = useMemo(() => new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 2, maximumFractionDigits: 2, }), []); - // 数字格式化(千分位,不带小数)- 用于市值 const integerFormatter = useMemo(() => new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 0, maximumFractionDigits: 0, }), []); - // 规范化 Markdown(修复 AI 输出导致的有序列表解析问题) const normalizeMarkdown = useMemo(() => { return (content: string): string => { if (!content) return content; @@ -220,10 +254,7 @@ export default function ReportPage() { for (let i = 0; i < lines.length; i += 1) { let line = lines[i]; - // 将"1、"/"1 、"等替换为"1. " line = line.replace(/^(\s*)(\d+)[、,]\s*/u, '$1$2. '); - - // 如果是独立一行仅有 "1." 之类的编号,则与下一行合并 const onlyIndexMatch = line.match(/^\s*(\d+)\.[\s\u3000]*$/u); if (onlyIndexMatch) { const next = lines[i + 1] ?? 
''; @@ -231,68 +262,59 @@ export default function ReportPage() { i += 1; continue; } - out.push(line); } let text = out.join('\n'); - // 在以"1."开头的列表前补一个空行,防止被前段落粘连 text = text.replace(/([^\n])\n(\s*\d+\.\s)/g, (_m, a, b) => `${a}\n\n${b}`); return text; }; }, []); - // 取消独立公司简介加载;统一纳入顺序分析 - - // 检查是否有正在进行的任务 const hasRunningTask = useMemo(() => { if (currentAnalysisTask !== null) return true; if (analysisRecords.some(r => r.status === 'running')) return true; return false; }, [currentAnalysisTask, analysisRecords]); + + // 全部任务是否完成(无运行中任务,且所有分析记录为 done 或 error) + const allTasksCompleted = useMemo(() => { + if (analysisRecords.length === 0) return false; + const allDoneOrErrored = analysisRecords.every(r => r.status === 'done' || r.status === 'error'); + return allDoneOrErrored && !hasRunningTask && currentAnalysisTask === null; + }, [analysisRecords, hasRunningTask, currentAnalysisTask]); + + // 所有任务完成时,停止计时器 + useEffect(() => { + if (allTasksCompleted) { + setStartTime(null); + } + }, [allTasksCompleted]); - // 计时器效果 useEffect(() => { if (!startTime) return; - const interval = setInterval(() => { const now = Date.now(); const elapsed = Math.floor((now - startTime) / 1000); setElapsedSeconds(elapsed); }, 1000); - return () => clearInterval(interval); }, [startTime]); - - - // 重试单个分析任务 const retryAnalysis = async (analysisType: string) => { if (!isChina || !financials || !analysisConfig?.analysis_modules) { return; } - // 允许 company_profile 通过通用通道重试 - - // 清除该任务的已完成标记,允许重新执行 analysisFetchedRefs.current[analysisType] = false; - - // 清除错误状态 setAnalysisStates(prev => ({ ...prev, [analysisType]: { content: '', loading: true, error: null } })); - - // 移除旧的错误记录 setAnalysisRecords(prev => prev.filter(record => record.type !== analysisType)); - const analysisName = analysisConfig.analysis_modules[analysisType]?.name || analysisType; const startTime = new Date().toISOString(); - - // 设置当前任务 setCurrentAnalysisTask(analysisType); - - // 添加执行记录 setAnalysisRecords(prev => [...prev, { type: analysisType, name: analysisName, @@ -301,74 +323,59 @@ export default function ReportPage() { }]); try { + const startedMsLocal = Date.now(); const response = await fetch( - `/api/financials/china/${normalizedTsCode}/analysis/${analysisType}?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}` + `/api/financials/china/${normalizedTsCode}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}` ); - if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); } - - const data: AnalysisResponse = await response.json(); - const endTime = new Date().toISOString(); - - if (data.success) { - // 更新状态 - setAnalysisStates(prev => ({ - ...prev, - [analysisType]: { - content: data.content, - loading: false, - error: null, - elapsed_ms: data.elapsed_ms, - tokens: data.tokens - } - })); - - // 更新执行记录 - setAnalysisRecords(prev => prev.map(record => - record.type === analysisType - ? { - ...record, - status: 'done', - end_ts: endTime, - duration_ms: data.elapsed_ms, - tokens: data.tokens - } - : record - )); - } else { - // 更新状态 - setAnalysisStates(prev => ({ - ...prev, - [analysisType]: { - content: '', - loading: false, - error: data.error || '生成失败', - elapsed_ms: data.elapsed_ms, - tokens: data.tokens - } - })); - - // 更新执行记录 - setAnalysisRecords(prev => prev.map(record => - record.type === analysisType - ? 
{ - ...record, - status: 'error', - end_ts: endTime, - duration_ms: data.elapsed_ms, - tokens: data.tokens, - error: data.error || '生成失败' - } - : record - )); + const reader = response.body?.getReader(); + const decoder = new TextDecoder(); + let aggregate = ''; + if (reader) { + while (true) { + const { value, done } = await reader.read(); + if (done) break; + const chunk = decoder.decode(value, { stream: true }); + aggregate += chunk; + const snapshot = aggregate; + setAnalysisStates(prev => ({ + ...prev, + [analysisType]: { + ...prev[analysisType], + content: snapshot, + loading: true, + error: null, + } + })); + } } + const endTime = new Date().toISOString(); + const elapsedMs = Date.now() - startedMsLocal; + setAnalysisStates(prev => ({ + ...prev, + [analysisType]: { + ...prev[analysisType], + content: aggregate, + loading: false, + error: null, + elapsed_ms: elapsedMs, + } + })); + setAnalysisRecords(prev => prev.map(record => + record.type === analysisType + ? { + ...record, + status: 'done', + end_ts: endTime, + duration_ms: elapsedMs, + } + : record + )); } catch (err) { const errorMessage = err instanceof Error ? err.message : '加载失败'; const endTime = new Date().toISOString(); - - // 更新状态 setAnalysisStates(prev => ({ ...prev, [analysisType]: { @@ -377,8 +384,6 @@ export default function ReportPage() { error: errorMessage } })); - - // 更新执行记录 setAnalysisRecords(prev => prev.map(record => record.type === analysisType ? { @@ -390,64 +395,44 @@ export default function ReportPage() { : record )); } finally { - // 清除当前任务 setCurrentAnalysisTask(null); - // 标记为已完成(无论成功还是失败) analysisFetchedRefs.current[analysisType] = true; } }; - // 顺序执行各个分析 useEffect(() => { - // 确保所有必需的数据都已加载 if (!isChina || isLoading || error || !financials || !analysisConfig?.analysis_modules || analysisTypes.length === 0) { return; } - - // 如果已经有分析任务正在运行,则跳过 if (isAnalysisRunningRef.current) { return; } - const runAnalysesSequentially = async () => { - // 设置运行标志,防止并发执行 if (isAnalysisRunningRef.current) { return; } isAnalysisRunningRef.current = true; - try { - if (!startTime) { + if (!stopRequestedRef.current && !startTime) { setStartTime(Date.now()); } for (let i = 0; i < analysisTypes.length; i++) { const analysisType = analysisTypes[i]; - if (stopRequestedRef.current) { break; } - if (analysisFetchedRefs.current[analysisType]) { - continue; // 已加载过,跳过 + continue; } - - // Ensure refs and config are defined before proceeding if (!analysisFetchedRefs.current || !analysisConfig?.analysis_modules) { console.error("分析配置或refs未初始化,无法进行分析。"); continue; } - - // 记录当前类型 currentAnalysisTypeRef.current = analysisType; const analysisName = analysisConfig.analysis_modules[analysisType]?.name || analysisType; const startTime = new Date().toISOString(); - - // 设置当前任务 setCurrentAnalysisTask(analysisType); - - - // 设置/更新执行记录为 running(避免重复项) setAnalysisRecords(prev => { const next = [...prev]; const idx = next.findIndex(r => r.type === analysisType); @@ -464,82 +449,66 @@ export default function ReportPage() { } return next; }); - - // 设置加载状态 setAnalysisStates(prev => ({ ...prev, [analysisType]: { content: '', loading: true, error: null } })); - try { abortControllerRef.current?.abort(); abortControllerRef.current = new AbortController(); + const startedMsLocal = Date.now(); const response = await fetch( - `/api/financials/china/${normalizedTsCode}/analysis/${analysisType}?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`, + 
`/api/financials/china/${normalizedTsCode}/analysis/${analysisType}/stream?company_name=${encodeURIComponent(financials?.name || normalizedTsCode)}`, { signal: abortControllerRef.current.signal } ); - if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); } - - const data: AnalysisResponse = await response.json(); - const endTime = new Date().toISOString(); - - if (data.success) { - // 更新状态 - setAnalysisStates(prev => ({ - ...prev, - [analysisType]: { - content: data.content, - loading: false, - error: null, - elapsed_ms: data.elapsed_ms, - tokens: data.tokens - } - })); - - // 更新执行记录 - setAnalysisRecords(prev => prev.map(record => - record.type === analysisType - ? { - ...record, - status: 'done', - end_ts: endTime, - duration_ms: data.elapsed_ms, - tokens: data.tokens - } - : record - )); - } else { - // 更新状态 - setAnalysisStates(prev => ({ - ...prev, - [analysisType]: { - content: '', - loading: false, - error: data.error || '生成失败', - elapsed_ms: data.elapsed_ms, - tokens: data.tokens - } - })); - - // 更新执行记录 - setAnalysisRecords(prev => prev.map(record => - record.type === analysisType - ? { - ...record, - status: 'error', - end_ts: endTime, - duration_ms: data.elapsed_ms, - tokens: data.tokens, - error: data.error || '生成失败' - } - : record - )); + const reader = response.body?.getReader(); + const decoder = new TextDecoder(); + let aggregate = ''; + if (reader) { + // 持续读取并追加到内容 + while (true) { + const { value, done } = await reader.read(); + if (done) break; + const chunk = decoder.decode(value, { stream: true }); + aggregate += chunk; + const snapshot = aggregate; + setAnalysisStates(prev => ({ + ...prev, + [analysisType]: { + ...prev[analysisType], + content: snapshot, + loading: true, + error: null, + } + })); + } } + const endTime = new Date().toISOString(); + const elapsedMs = Date.now() - startedMsLocal; + setAnalysisStates(prev => ({ + ...prev, + [analysisType]: { + ...prev[analysisType], + content: aggregate, + loading: false, + error: null, + elapsed_ms: elapsedMs, + } + })); + setAnalysisRecords(prev => prev.map(record => + record.type === analysisType + ? { + ...record, + status: 'done', + end_ts: endTime, + duration_ms: elapsedMs, + } + : record + )); } catch (err) { - // 若为主动中止,则把当前任务恢复为待处理并退出循环 if (err && typeof err === 'object' && (err as any).name === 'AbortError') { setAnalysisStates(prev => ({ ...prev, @@ -555,8 +524,6 @@ export default function ReportPage() { } const errorMessage = err instanceof Error ? err.message : '加载失败'; const endTime = new Date().toISOString(); - - // 更新状态 setAnalysisStates(prev => ({ ...prev, [analysisType]: { @@ -565,8 +532,6 @@ export default function ReportPage() { error: errorMessage } })); - - // 更新执行记录 setAnalysisRecords(prev => prev.map(record => record.type === analysisType ? 
{ @@ -578,20 +543,16 @@ export default function ReportPage() { : record )); } finally { - // 清除当前任务 setCurrentAnalysisTask(null); currentAnalysisTypeRef.current = null; analysisFetchedRefs.current[analysisType] = true; } } } finally { - // 清除运行标志 isAnalysisRunningRef.current = false; } }; - runAnalysesSequentially(); - // eslint-disable-next-line react-hooks/exhaustive-deps }, [isChina, isLoading, error, financials, analysisConfig, analysisTypes, normalizedTsCode, manualRunKey]); const stopAll = () => { @@ -603,14 +564,12 @@ export default function ReportPage() { analysisFetchedRefs.current[currentAnalysisTypeRef.current] = false; } setCurrentAnalysisTask(null); - // 暂停计时器 setStartTime(null); }; const continuePending = () => { if (isAnalysisRunningRef.current) return; stopRequestedRef.current = false; - // 恢复计时器:保持累计秒数继续计时 setStartTime((prev) => (prev == null ? Date.now() - elapsedSeconds * 1000 : prev)); setManualRunKey((k) => k + 1); }; @@ -618,7 +577,6 @@ export default function ReportPage() { return (
- {/* 左侧:报告信息卡片 */} 报告页面 @@ -647,10 +605,9 @@ export default function ReportPage() { )}
+ - - {/* 中间:操作卡片 */} {isChina && ( @@ -666,8 +623,6 @@ export default function ReportPage() { )} - - {/* 右侧:任务状态 */} {isChina && ( @@ -685,10 +640,16 @@ export default function ReportPage() { style={{ width: `${completionProgress}%` }} />
- {/* 操作按钮已移至左侧信息卡片 */} + {allTasksCompleted && ( +
+ + {saveMsg && {saveMsg}} +
+ )} - {/* 当前正在进行的任务 */} {currentAnalysisTask && analysisConfig && ( (() => { const analysisName = analysisConfig.analysis_modules[currentAnalysisTask]?.name || currentAnalysisTask; @@ -704,44 +665,6 @@ export default function ReportPage() { ); })() )} - - {/* 最近一个已完成的任务 */} - {(() => { - // 找到最近一个已完成的任务(按结束时间排序) - const completedRecords = analysisRecords - .filter(r => r.status === 'done' && r.end_ts) - .sort((a, b) => { - if (!a.end_ts || !b.end_ts) return 0; - return new Date(b.end_ts).getTime() - new Date(a.end_ts).getTime(); - }); - - if (completedRecords.length > 0) { - const latestRecord = completedRecords[0]; - return ( -
- -
-
{latestRecord.name}
-
已完成
-
-
- ); - } - - if (financials && !isLoading && !error) { - return ( -
- -
-
财务数据获取
-
已完成
-
-
- ); - } - - return null; - })()}
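One subtlety in the two streaming read loops added in this file (the `retryAnalysis` path and the sequential runner): `decoder.decode(value, { stream: true })` buffers an incomplete multi-byte UTF-8 sequence at a chunk boundary, and that buffered tail is only emitted by a final argument-less `decode()` call. Since the streamed analyses are CJK-heavy, a flush after each loop may be worth adding — a sketch against the variables used above:

```ts
// After the while-loop ends (done === true), flush the decoder so a
// multi-byte character split across the final chunk is not silently dropped.
aggregate += decoder.decode() // no arguments = end-of-stream flush
```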
)} diff --git a/frontend/src/app/reports/[id]/page.tsx b/frontend/src/app/reports/[id]/page.tsx new file mode 100644 index 0000000..5e494b6 --- /dev/null +++ b/frontend/src/app/reports/[id]/page.tsx @@ -0,0 +1,767 @@ +import { prisma } from '../../../lib/prisma' +import ReactMarkdown from 'react-markdown' +import remarkGfm from 'remark-gfm' +import { Tabs, TabsList, TabsTrigger, TabsContent } from '@/components/ui/tabs' +import { Card, CardHeader, CardTitle, CardContent } from '@/components/ui/card' +import { Table, TableHeader, TableBody, TableHead, TableRow, TableCell } from '@/components/ui/table' + +type Report = { + id: string + symbol: string + content: any + createdAt: string +} + +export default async function ReportDetailPage({ params }: { params: Promise<{ id: string }> }) { + const { id } = await params + const data = await prisma.report.findUnique({ where: { id } }) + + if (!data) { + return
未找到报告
+ } + + const content = (data.content ?? {}) as any + const analyses = (content?.analyses ?? {}) as Record<string, any> + + // 规范化显示顺序(与生成报告时一致的中文 Tabs 次序) + const ordered = [ + { id: 'financial', label: '财务数据' }, + { id: 'company_profile', label: '公司简介' }, + { id: 'fundamentals', label: '基本面分析' }, + { id: 'bullish', label: '看涨分析' }, + { id: 'bearish', label: '看跌分析' }, + { id: 'market', label: '市场分析' }, + { id: 'news', label: '新闻分析' }, + { id: 'trading', label: '交易分析' }, + { id: 'insiders_institutions', label: '内部人及机构动向分析' }, + { id: 'final_conclusion', label: '最终结论' }, + { id: 'meta', label: '元数据' }, + ] as const + + // 每个规范化 id 对应的候选后端 key(兼容不同命名) + const candidateKeys: Record<string, string[]> = { + company_profile: ['company_profile'], + fundamentals: ['fundamental_analysis', 'fundamentals_analysis', 'basic_analysis', 'basics_analysis'], + bullish: ['bullish_analysis', 'bullish_case', 'bull_case'], + bearish: ['bearish_analysis', 'bearish_case', 'bear_case'], + market: ['market_analysis'], + news: ['news_analysis'], + trading: ['trading_analysis'], + insiders_institutions: ['insider_institutional', 'insiders_institutions_analysis', 'insider_institution_analysis', 'insider_analysis'], + final_conclusion: ['final_conclusion', 'conclusion', 'investment_thesis'], + } + + const findKey = (id: string): string | null => { + const c = candidateKeys[id] + if (!c) return null + for (const k of c) { + if (Object.prototype.hasOwnProperty.call(analyses, k)) return k + } + return null + } + + // 去掉正文开头重复的大标题(Markdown 以 # 开头的行) + const stripTopHeadings = (text: string): string => { + const lines = String(text || '').split(/\r?\n/) + let i = 0 + while (i < lines.length) { + const t = lines[i]?.trim() || '' + if (t === '') { i += 1; continue } + if (/^#{1,6}\s+/.test(t)) { i += 1; continue } + break + } + return lines.slice(i).join('\n').trimStart() + } + + return ( +
+    <div>
+      <div>
+        <h1>报告详情</h1>
+        <div>{new Date(data.createdAt).toLocaleString()}</div>
+      </div>
+
+      <Card>
+        <CardHeader>
+          <CardTitle>基本信息</CardTitle>
+        </CardHeader>
+        <CardContent>
+          <span>股票代码:{data.symbol}</span>
+          {content?.normalizedSymbol && (
+            <span>标准代码:{String(content.normalizedSymbol)}</span>
+          )}
+          {(() => {
+            const companyName = (content?.financials?.name as string | undefined) || (content as any)?.company_name || (content as any)?.companyName
+            return companyName ? (
+              <span>公司名称:{companyName}</span>
+            ) : null
+          })()}
+          {content?.market && (
+            <span>市场:{String(content.market)}</span>
+          )}
+        </CardContent>
+      </Card>
+
+      <Tabs defaultValue="financial">
+        <TabsList>
+          {ordered.map((o, idx) => (
+            <TabsTrigger key={o.id} value={o.id}>{`${idx + 1}. ${o.label}`}</TabsTrigger>
+          ))}
+        </TabsList>
+
+        <TabsContent value="financial">
+          <Card>
+            <CardHeader>
+              <CardTitle>财务数据(保存自读取结果)</CardTitle>
+            </CardHeader>
+            <CardContent>
+              {(() => {
+                const fin = (content?.financials ?? null) as null | {
+                  ts_code?: string
+                  name?: string
+                  series?: Record<string, Array<{ year: string; value: number | null; month?: number | null }>>
+                  meta?: any
+                }
+
+                const series = fin?.series || {}
+                const allPoints = Object.values(series).flat() as Array<{ year: string; value: number | null; month?: number | null }>
+                const years = Array.from(new Set(allPoints.map(p => p?.year).filter(Boolean) as string[])).sort((a, b) => Number(b) - Number(a))
+
+                const numberFormatter = new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 2, maximumFractionDigits: 2 })
+                const integerFormatter = new Intl.NumberFormat('zh-CN', { minimumFractionDigits: 0, maximumFractionDigits: 0 })
+
+                const metricDisplayMap: Record<string, string> = {
+                  roe: 'ROE',
+                  roa: 'ROA',
+                  roic: 'ROCE/ROIC',
+                  grossprofit_margin: '毛利率',
+                  netprofit_margin: '净利润率',
+                  tr_yoy: '收入增速',
+                  dt_netprofit_yoy: '净利润增速',
+                  revenue: '收入',
+                  n_income: '净利润',
+                  n_cashflow_act: '经营现金流',
+                  c_pay_acq_const_fiolta: '资本开支',
+                  cash_div_tax: '分红',
+                  buyback: '回购',
+                  total_assets: '总资产',
+                  total_hldr_eqy_exc_min_int: '股东权益',
+                  goodwill: '商誉',
+                  total_mv: '市值',
+                }
+
+                const metricGroupMap: Record<string, string> = {
+                  revenue: 'income',
+                  n_income: 'income',
+                  total_assets: 'balancesheet',
+                  total_hldr_eqy_exc_min_int: 'balancesheet',
+                  goodwill: 'balancesheet',
+                  n_cashflow_act: 'cashflow',
+                  c_pay_acq_const_fiolta: 'cashflow',
+                }
+
+                if (years.length === 0) {
+                  return (
+                    <div>
+                      暂无保存的财务数据。下次保存报告时会一并保存财务数据。
+                    </div>
+                  )
+                }
+
+                const currentYearStr = String(new Date().getFullYear())
+                const getQuarter = (month: number | null | undefined) => {
+                  if (month == null) return null
+                  return Math.floor((month - 1) / 3) + 1
+                }
+
+                const PERCENT_KEYS = new Set(['roe','roa','roic','grossprofit_margin','netprofit_margin','tr_yoy','dt_netprofit_yoy'])
+
+                const ORDER: Array<{ key: string; label?: string; kind?: 'computed' }> = [
+                  { key: 'roe' },
+                  { key: 'roa' },
+                  { key: 'roic' },
+                  { key: 'grossprofit_margin' },
+                  { key: 'netprofit_margin' },
+                  { key: 'revenue' },
+                  { key: 'tr_yoy' },
+                  { key: 'n_income' },
+                  { key: 'dt_netprofit_yoy' },
+                  { key: 'n_cashflow_act' },
+                  { key: 'c_pay_acq_const_fiolta' },
+                  { key: '__free_cash_flow', label: '自由现金流', kind: 'computed' },
+                  { key: 'cash_div_tax', label: '分红' },
+                  { key: 'buyback', label: '回购' },
+                  { key: 'total_assets' },
+                  { key: 'total_hldr_eqy_exc_min_int' },
+                  { key: 'goodwill' },
+                ]
+
+                return (
+                  <div>
+                    <Table>
+                      <TableHeader>
+                        <TableRow>
+                          <TableHead>指标</TableHead>
+                          {years.map((y) => {
+                            const yearData = allPoints.find(p => p.year === y)
+                            const isCurrent = y === currentYearStr
+                            const quarter = yearData?.month ? getQuarter(yearData.month) : null
+                            const label = isCurrent && quarter ? `${y} Q${quarter}` : y
+                            return <TableHead key={y}>{label}</TableHead>
+                          })}
+                        </TableRow>
+                      </TableHeader>
+                      <TableBody>
+                        {(() => {
+                          const summaryRow = (
+                            <TableRow key="__summary">
+                              <TableCell>主要指标</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+
+                          const rows = ORDER.map(({ key, label, kind }) => {
+                            const isComputed = kind === 'computed' && key === '__free_cash_flow'
+                            const points = series[key] as Array<{ year?: string; value?: number | null }> | undefined
+                            const operating = series['n_cashflow_act'] as Array<{ year?: string; value?: number | null }> | undefined
+                            const capex = series['c_pay_acq_const_fiolta'] as Array<{ year?: string; value?: number | null }> | undefined
+                            return (
+                              <TableRow key={key}>
+                                <TableCell>{label || metricDisplayMap[key] || key}</TableCell>
+                                {years.map((y) => {
+                                  let v: number | null | undefined = undefined
+                                  if (isComputed) {
+                                    const op = operating?.find(p => p?.year === y)?.value ?? null
+                                    const cp = capex?.find(p => p?.year === y)?.value ?? null
+                                    v = (op == null || cp == null) ? null : (Number(op) - Number(cp))
+                                  } else {
+                                    v = points?.find(p => p?.year === y)?.value ?? null
+                                  }
+
+                                  const groupName = metricGroupMap[key]
+                                  const rawNum = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                  if (rawNum == null || Number.isNaN(rawNum)) {
+                                    return <TableCell key={y}>-</TableCell>
+                                  }
+                                  if (PERCENT_KEYS.has(key)) {
+                                    const perc = Math.abs(rawNum) <= 1 ? rawNum * 100 : rawNum
+                                    const text = Number.isFinite(perc) ? numberFormatter.format(perc) : '-'
+                                    const isGrowthRow = key === 'tr_yoy' || key === 'dt_netprofit_yoy'
+                                    if (isGrowthRow) {
+                                      const isNeg = typeof perc === 'number' && perc < 0
+                                      return (
+                                        <TableCell key={y}>
+                                          {isNeg ? <span className="text-red-600">{text}%</span> : `${text}%`}
+                                        </TableCell>
+                                      )
+                                    }
+                                    if (key === 'roe' || key === 'roic') {
+                                      const highlight = typeof perc === 'number' && perc > 12
+                                      return (
+                                        <TableCell key={y} className={highlight ? 'font-semibold' : undefined}>{`${text}%`}</TableCell>
+                                      )
+                                    }
+                                    return <TableCell key={y}>{`${text}%`}</TableCell>
+                                  } else {
+                                    const isFinGroup = groupName === 'income' || groupName === 'balancesheet' || groupName === 'cashflow'
+                                    const scaled = key === 'total_mv' ? rawNum / 10000 : (isFinGroup || isComputed ? rawNum / 1e8 : rawNum)
+                                    const formatter = key === 'total_mv' ? integerFormatter : numberFormatter
+                                    const text = Number.isFinite(scaled) ? formatter.format(scaled) : '-'
+                                    if (key === '__free_cash_flow') {
+                                      const isNeg = typeof scaled === 'number' && scaled < 0
+                                      return (
+                                        <TableCell key={y}>{isNeg ? <span className="text-red-600">{text}</span> : text}</TableCell>
+                                      )
+                                    }
+                                    return <TableCell key={y}>{text}</TableCell>
+                                  }
+                                })}
+                              </TableRow>
+                            )
+                          })
+
+                          const getVal = (arr: Array<{ year?: string; value?: number | null }> | undefined, y: string) => {
+                            const v = arr?.find(p => p?.year === y)?.value
+                            return typeof v === 'number' ? v : (v == null ? null : Number(v))
+                          }
+
+                          // Expense metrics
+                          const feeHeaderRow = (
+                            <TableRow key="__fee_header">
+                              <TableCell>费用指标</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+                          const feeRows = [
+                            { key: '__sell_rate', label: '销售费用率', num: series['sell_exp'] as any, den: series['revenue'] as any },
+                            { key: '__admin_rate', label: '管理费用率', num: series['admin_exp'] as any, den: series['revenue'] as any },
+                            { key: '__rd_rate', label: '研发费用率', num: series['rd_exp'] as any, den: series['revenue'] as any },
+                            { key: '__other_fee_rate', label: '其他费用率', num: undefined, den: series['revenue'] as any },
+                            { key: '__tax_rate', label: '所得税率', num: series['tax_to_ebt'] as any, den: undefined },
+                            { key: '__depr_ratio', label: '折旧费用占比', num: series['depr_fa_coga_dpba'] as any, den: series['revenue'] as any },
+                          ].map(({ key, label, num, den }) => (
+                            <TableRow key={key}>
+                              <TableCell>{label}</TableCell>
+                              {years.map((y) => {
+                                let rate: number | null = null
+                                if (key === '__tax_rate') {
+                                  const numerator = getVal(num, y)
+                                  if (numerator == null || Number.isNaN(numerator)) {
+                                    rate = null
+                                  } else if (Math.abs(numerator) <= 1) {
+                                    rate = numerator * 100
+                                  } else {
+                                    rate = numerator
+                                  }
+                                } else if (key === '__other_fee_rate') {
+                                  const gpRaw = getVal(series['grossprofit_margin'] as any, y)
+                                  const npRaw = getVal(series['netprofit_margin'] as any, y)
+                                  const rev = getVal(series['revenue'] as any, y)
+                                  const sell = getVal(series['sell_exp'] as any, y)
+                                  const admin = getVal(series['admin_exp'] as any, y)
+                                  const rd = getVal(series['rd_exp'] as any, y)
+                                  if (gpRaw == null || npRaw == null || rev == null || rev === 0 || sell == null || admin == null || rd == null) {
+                                    rate = null
+                                  } else {
+                                    const gp = Math.abs(gpRaw) <= 1 ? gpRaw * 100 : gpRaw
+                                    const np = Math.abs(npRaw) <= 1 ? npRaw * 100 : npRaw
+                                    const sellRate = (sell / rev) * 100
+                                    const adminRate = (admin / rev) * 100
+                                    const rdRate = (rd / rev) * 100
+                                    rate = gp - np - sellRate - adminRate - rdRate
+                                  }
+                                } else {
+                                  const numerator = getVal(num, y)
+                                  const denominator = getVal(den, y)
+                                  if (numerator == null || denominator == null || denominator === 0) {
+                                    rate = null
+                                  } else {
+                                    rate = (numerator / denominator) * 100
+                                  }
+                                }
+                                if (rate == null || !Number.isFinite(rate)) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                const rateText = numberFormatter.format(rate)
+                                const isNegative = rate < 0
+                                return (
+                                  <TableCell key={y}>
+                                    {isNegative ? <span className="text-red-600">{rateText}%</span> : `${rateText}%`}
+                                  </TableCell>
+                                )
+                              })}
+                            </TableRow>
+                          ))
+
+                          // Asset composition
+                          const assetHeaderRow = (
+                            <TableRow key="__asset_header">
+                              <TableCell>资产占比</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+                          const ratioCell = (value: number | null, y: string) => {
+                            if (value == null || !Number.isFinite(value)) {
+                              return <TableCell key={y}>-</TableCell>
+                            }
+                            const text = numberFormatter.format(value)
+                            const isNegative = value < 0
+                            return (
+                              <TableCell key={y}>
+                                {isNegative ? <span className="text-red-600">{text}%</span> : `${text}%`}
+                              </TableCell>
+                            )
+                          }
+                          const assetRows = [
+                            { key: '__money_cap_ratio', label: '现金占比', calc: (y: string) => {
+                              const num = getVal(series['money_cap'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__inventories_ratio', label: '库存占比', calc: (y: string) => {
+                              const num = getVal(series['inventories'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__ar_ratio', label: '应收款占比', calc: (y: string) => {
+                              const num = getVal(series['accounts_receiv_bill'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__prepay_ratio', label: '预付款占比', calc: (y: string) => {
+                              const num = getVal(series['prepayment'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__fix_assets_ratio', label: '固定资产占比', calc: (y: string) => {
+                              const num = getVal(series['fix_assets'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__lt_invest_ratio', label: '长期投资占比', calc: (y: string) => {
+                              const num = getVal(series['lt_eqt_invest'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__goodwill_ratio', label: '商誉占比', calc: (y: string) => {
+                              const num = getVal(series['goodwill'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__other_assets_ratio', label: '其他资产占比', calc: (y: string) => {
+                              const total = getVal(series['total_assets'] as any, y)
+                              if (total == null || total === 0) return null
+                              const parts = [
+                                getVal(series['money_cap'] as any, y) || 0,
+                                getVal(series['inventories'] as any, y) || 0,
+                                getVal(series['accounts_receiv_bill'] as any, y) || 0,
+                                getVal(series['prepayment'] as any, y) || 0,
+                                getVal(series['fix_assets'] as any, y) || 0,
+                                getVal(series['lt_eqt_invest'] as any, y) || 0,
+                                getVal(series['goodwill'] as any, y) || 0,
+                              ]
+                              const sumKnown = parts.reduce((acc: number, v: number) => acc + v, 0)
+                              return ((total - sumKnown) / total) * 100
+                            } },
+                            { key: '__ap_ratio', label: '应付款占比', calc: (y: string) => {
+                              const num = getVal(series['accounts_pay'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__adv_ratio', label: '预收款占比', calc: (y: string) => {
+                              const adv = getVal(series['adv_receipts'] as any, y) || 0
+                              const contractLiab = getVal(series['contract_liab'] as any, y) || 0
+                              const num = adv + contractLiab
+                              const den = getVal(series['total_assets'] as any, y)
+                              return den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__st_borr_ratio', label: '短期借款占比', calc: (y: string) => {
+                              const num = getVal(series['st_borr'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__lt_borr_ratio', label: '长期借款占比', calc: (y: string) => {
+                              const num = getVal(series['lt_borr'] as any, y)
+                              const den = getVal(series['total_assets'] as any, y)
+                              return num == null || den == null || den === 0 ? null : (num / den) * 100
+                            } },
+                            { key: '__interest_bearing_debt_ratio', label: '有息负债率', calc: (y: string) => {
+                              const total = getVal(series['total_assets'] as any, y)
+                              if (total == null || total === 0) return null
+                              const st = getVal(series['st_borr'] as any, y) || 0
+                              const lt = getVal(series['lt_borr'] as any, y) || 0
+                              return ((st + lt) / total) * 100
+                            } },
+                            { key: '__operating_assets_ratio', label: '运营资产占比', calc: (y: string) => {
+                              const total = getVal(series['total_assets'] as any, y)
+                              if (total == null || total === 0) return null
+                              const inv = getVal(series['inventories'] as any, y) || 0
+                              const ar = getVal(series['accounts_receiv_bill'] as any, y) || 0
+                              const pre = getVal(series['prepayment'] as any, y) || 0
+                              const ap = getVal(series['accounts_pay'] as any, y) || 0
+                              const adv = getVal(series['adv_receipts'] as any, y) || 0
+                              const contractLiab = getVal(series['contract_liab'] as any, y) || 0
+                              const operating = inv + ar + pre - ap - adv - contractLiab
+                              return (operating / total) * 100
+                            } },
+                          ].map(({ key, label, calc }) => (
+                            <TableRow key={key}>
+                              <TableCell>{label}</TableCell>
+                              {years.map((y) => ratioCell(calc(y), y))}
+                            </TableRow>
+                          ))
+
+                          // Turnover
+                          const turnoverHeaderRow = (
+                            <TableRow key="__turnover_header">
+                              <TableCell>周转能力</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+                          const getYearNumber = (ys: string) => {
+                            const n = Number(ys)
+                            return Number.isFinite(n) ? n : null
+                          }
+                          const getPoint = (arr: Array<{ year?: string; value?: number | null }> | undefined, year: string) => {
+                            return arr?.find(p => p?.year === year)?.value ?? null
+                          }
+                          const getAvg = (arr: Array<{ year?: string; value?: number | null }> | undefined, year: string) => {
+                            const curr = getPoint(arr, year)
+                            const yNum = getYearNumber(year)
+                            const prevYear = yNum != null ? String(yNum - 1) : null
+                            const prev = prevYear ? getPoint(arr, prevYear) : null
+                            const c = typeof curr === 'number' ? curr : (curr == null ? null : Number(curr))
+                            const p = typeof prev === 'number' ? prev : (prev == null ? null : Number(prev))
+                            if (c == null) return null
+                            if (p == null) return c
+                            return (c + p) / 2
+                          }
+                          const getMarginRatio = (year: string) => {
+                            const gmRaw = getPoint(series['grossprofit_margin'] as any, year)
+                            if (gmRaw == null) return null
+                            const gmNum = typeof gmRaw === 'number' ? gmRaw : Number(gmRaw)
+                            if (!Number.isFinite(gmNum)) return null
+                            return Math.abs(gmNum) <= 1 ? gmNum : gmNum / 100
+                          }
+                          const getRevenue = (year: string) => {
+                            const rev = getPoint(series['revenue'] as any, year)
+                            const r = typeof rev === 'number' ? rev : (rev == null ? null : Number(rev))
+                            return r
+                          }
+                          const getCOGS = (year: string) => {
+                            const rev = getRevenue(year)
+                            const gm = getMarginRatio(year)
+                            if (rev == null || gm == null) return null
+                            const cogs = rev * (1 - gm)
+                            return Number.isFinite(cogs) ? cogs : null
+                          }
+                          const turnoverItems: Array<{ key: string; label: string }> = [
+                            { key: 'invturn_days', label: '存货周转天数' },
+                            { key: 'arturn_days', label: '应收款周转天数' },
+                            { key: 'payturn_days', label: '应付款周转天数' },
+                            { key: 'fa_turn', label: '固定资产周转率' },
+                            { key: 'assets_turn', label: '总资产周转率' },
+                          ]
+                          const turnoverRows = turnoverItems.map(({ key, label }) => (
+                            <TableRow key={key}>
+                              <TableCell>{label}</TableCell>
+                              {years.map((y) => {
+                                let value: number | null = null
+                                if (key === 'payturn_days') {
+                                  const avgAP = getAvg(series['accounts_pay'] as any, y)
+                                  const cogs = getCOGS(y)
+                                  value = avgAP == null || cogs == null || cogs === 0 ? null : (365 * avgAP) / cogs
+                                } else {
+                                  const arr = series[key] as Array<{ year?: string; value?: number | null }> | undefined
+                                  const v = arr?.find(p => p?.year === y)?.value ?? null
+                                  const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                  value = num == null || Number.isNaN(num) ? null : num
+                                }
+                                if (value == null || !Number.isFinite(value)) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                const text = numberFormatter.format(value)
+                                if (key === 'arturn_days' && value > 90) {
+                                  return (
+                                    <TableCell key={y}><span className="text-red-600">{text}</span></TableCell>
+                                  )
+                                }
+                                return <TableCell key={y}>{text}</TableCell>
+                              })}
+                            </TableRow>
+                          ))
+
+                          // Per-capita efficiency
+                          const perCapitaHeaderRow = (
+                            <TableRow key="__percap_header">
+                              <TableCell>人均效率</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+                          const employeesRow = (
+                            <TableRow key="__employees">
+                              <TableCell>员工人数</TableCell>
+                              {years.map((y) => {
+                                const v = getVal(series['employees'] as any, y)
+                                if (v == null || !Number.isFinite(v)) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                return <TableCell key={y}>{integerFormatter.format(Math.round(v))}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const revPerEmpRow = (
+                            <TableRow key="__rev_per_emp">
+                              <TableCell>人均创收(万元)</TableCell>
+                              {years.map((y) => {
+                                const rev = getVal(series['revenue'] as any, y)
+                                const emp = getVal(series['employees'] as any, y)
+                                if (rev == null || emp == null || emp === 0) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                const val = (rev / emp) / 10000
+                                return <TableCell key={y}>{numberFormatter.format(val)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const profitPerEmpRow = (
+                            <TableRow key="__profit_per_emp">
+                              <TableCell>人均创利(万元)</TableCell>
+                              {years.map((y) => {
+                                const prof = getVal(series['n_income'] as any, y)
+                                const emp = getVal(series['employees'] as any, y)
+                                if (prof == null || emp == null || emp === 0) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                const val = (prof / emp) / 10000
+                                return <TableCell key={y}>{numberFormatter.format(val)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const salaryPerEmpRow = (
+                            <TableRow key="__salary_per_emp">
+                              <TableCell>人均工资(万元)</TableCell>
+                              {years.map((y) => {
+                                const salaryPaid = getVal(series['c_paid_to_for_empl'] as any, y)
+                                const emp = getVal(series['employees'] as any, y)
+                                if (salaryPaid == null || emp == null || emp === 0) {
+                                  return <TableCell key={y}>-</TableCell>
+                                }
+                                const val = (salaryPaid / emp) / 10000
+                                return <TableCell key={y}>{numberFormatter.format(val)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+
+                          // Market performance
+                          const marketHeaderRow = (
+                            <TableRow key="__market_header">
+                              <TableCell>市场表现</TableCell>
+                              {years.map((y) => (
+                                <TableCell key={y} />
+                              ))}
+                            </TableRow>
+                          )
+                          const priceRow = (
+                            <TableRow key="__price">
+                              <TableCell>股价</TableCell>
+                              {years.map((y) => {
+                                const arr = series['close'] as Array<{ year?: string; value?: number | null }> | undefined
+                                const v = arr?.find(p => p?.year === y)?.value ?? null
+                                const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                if (num == null || !Number.isFinite(num)) return <TableCell key={y}>-</TableCell>
+                                return <TableCell key={y}>{numberFormatter.format(num)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const marketCapRow = (
+                            <TableRow key="__market_cap">
+                              <TableCell>市值(亿元)</TableCell>
+                              {years.map((y) => {
+                                const arr = series['total_mv'] as Array<{ year?: string; value?: number | null }> | undefined
+                                const v = arr?.find(p => p?.year === y)?.value ?? null
+                                const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                if (num == null || !Number.isFinite(num)) return <TableCell key={y}>-</TableCell>
+                                const scaled = num / 10000
+                                return <TableCell key={y}>{integerFormatter.format(Math.round(scaled))}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const peRow = (
+                            <TableRow key="__pe">
+                              <TableCell>PE</TableCell>
+                              {years.map((y) => {
+                                const arr = series['pe'] as Array<{ year?: string; value?: number | null }> | undefined
+                                const v = arr?.find(p => p?.year === y)?.value ?? null
+                                const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                if (num == null || !Number.isFinite(num)) return <TableCell key={y}>-</TableCell>
+                                return <TableCell key={y}>{numberFormatter.format(num)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const pbRow = (
+                            <TableRow key="__pb">
+                              <TableCell>PB</TableCell>
+                              {years.map((y) => {
+                                const arr = series['pb'] as Array<{ year?: string; value?: number | null }> | undefined
+                                const v = arr?.find(p => p?.year === y)?.value ?? null
+                                const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                if (num == null || !Number.isFinite(num)) return <TableCell key={y}>-</TableCell>
+                                return <TableCell key={y}>{numberFormatter.format(num)}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+                          const holderNumRow = (
+                            <TableRow key="__holder_num">
+                              <TableCell>股东户数</TableCell>
+                              {years.map((y) => {
+                                const arr = series['holder_num'] as Array<{ year?: string; value?: number | null }> | undefined
+                                const v = arr?.find(p => p?.year === y)?.value ?? null
+                                const num = typeof v === 'number' ? v : (v == null ? null : Number(v))
+                                if (num == null || !Number.isFinite(num)) return <TableCell key={y}>-</TableCell>
+                                return <TableCell key={y}>{integerFormatter.format(Math.round(num))}</TableCell>
+                              })}
+                            </TableRow>
+                          )
+
+                          return [
+                            summaryRow,
+                            ...rows,
+                            feeHeaderRow,
+                            ...feeRows,
+                            assetHeaderRow,
+                            ...assetRows,
+                            turnoverHeaderRow,
+                            ...turnoverRows,
+                            perCapitaHeaderRow,
+                            employeesRow,
+                            revPerEmpRow,
+                            profitPerEmpRow,
+                            salaryPerEmpRow,
+                            marketHeaderRow,
+                            priceRow,
+                            marketCapRow,
+                            peRow,
+                            pbRow,
+                            holderNumRow,
+                          ]
+                        })()}
+                      </TableBody>
+                    </Table>
+                  </div>
+                )
+              })()}
+            </CardContent>
+          </Card>
+        </TabsContent>
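Note: the table body above derives several metrics rather than reading them from saved series: free cash flow is operating cash flow minus capex, "其他费用率" is the residual between gross and net margin after the three reported expense rates, and payables turnover days divides an averaged payables balance by estimated COGS. A condensed restatement of those three formulas, detached from the JSX — a sketch only; the `at` helper is introduced for illustration, while the series keys follow the Tushare-style names used above:

// Illustrative restatement of the derived metrics computed inline above.
type Point = { year: string; value: number | null }
type Series = Record<string, Point[]>

const at = (s: Series, key: string, year: string): number | null => {
  const v = s[key]?.find(p => p.year === year)?.value
  return v == null || Number.isNaN(Number(v)) ? null : Number(v)
}

// 自由现金流: operating cash flow minus capital expenditure.
function freeCashFlow(s: Series, year: string): number | null {
  const op = at(s, 'n_cashflow_act', year)
  const capex = at(s, 'c_pay_acq_const_fiolta', year)
  return op == null || capex == null ? null : op - capex
}

// 其他费用率: gross margin - net margin - (sell + admin + R&D expense rates), in percent.
// Margins may arrive as fractions (<= 1) or as percents, hence the normalization.
function otherFeeRate(s: Series, year: string): number | null {
  const rev = at(s, 'revenue', year)
  const gpRaw = at(s, 'grossprofit_margin', year)
  const npRaw = at(s, 'netprofit_margin', year)
  const sell = at(s, 'sell_exp', year)
  const admin = at(s, 'admin_exp', year)
  const rd = at(s, 'rd_exp', year)
  if (rev == null || rev === 0 || gpRaw == null || npRaw == null || sell == null || admin == null || rd == null) return null
  const gp = Math.abs(gpRaw) <= 1 ? gpRaw * 100 : gpRaw
  const np = Math.abs(npRaw) <= 1 ? npRaw * 100 : npRaw
  return gp - np - ((sell + admin + rd) / rev) * 100
}

// 应付款周转天数: 365 * average payables / COGS, where COGS is estimated from
// revenue and gross margin because it is not stored as its own series.
function payablesTurnoverDays(s: Series, year: string): number | null {
  const curr = at(s, 'accounts_pay', year)
  const prev = at(s, 'accounts_pay', String(Number(year) - 1))
  const avgAP = curr == null ? null : prev == null ? curr : (curr + prev) / 2
  const rev = at(s, 'revenue', year)
  const gmRaw = at(s, 'grossprofit_margin', year)
  if (avgAP == null || rev == null || gmRaw == null) return null
  const gm = Math.abs(gmRaw) <= 1 ? gmRaw : gmRaw / 100
  const cogs = rev * (1 - gm)
  return cogs === 0 ? null : (365 * avgAP) / cogs
}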
+        <TabsContent value="meta">
+          <Card>
+            <CardHeader>
+              <CardTitle>元数据(数据库原始记录)</CardTitle>
+            </CardHeader>
+            <CardContent>
+              <pre>
+                {JSON.stringify(data, null, 2)}
+              </pre>
+            </CardContent>
+          </Card>
+        </TabsContent>
+
+        {ordered.filter(o => o.id !== 'financial' && o.id !== 'meta').map((o) => {
+          const key = findKey(o.id)
+          const item = key ? analyses[key] || {} : {}
+          const md = stripTopHeadings(String(item?.content || ''))
+          const err = item?.error as string | undefined
+          return (
+            <TabsContent key={o.id} value={o.id}>
+              {err && <div>{err}</div>}
+              <Card>
+                <CardHeader>
+                  <CardTitle>{o.label}</CardTitle>
+                </CardHeader>
+                <CardContent>
+                  <ReactMarkdown remarkPlugins={[remarkGfm]}>
+                    {md}
+                  </ReactMarkdown>
+                </CardContent>
+              </Card>
+            </TabsContent>
+          )
+        })}
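Note: each tab strips leading Markdown headings before handing the body to ReactMarkdown, so a report section does not repeat its own title under the card header. A quick check of that helper's behavior — the helper is copied from above, and the sample string is invented for illustration:

// stripTopHeadings as defined in this file; sample input is hypothetical.
const stripTopHeadings = (text: string): string => {
  const lines = String(text || '').split(/\r?\n/)
  let i = 0
  while (i < lines.length) {
    const t = lines[i]?.trim() || ''
    if (t === '') { i += 1; continue }
    if (/^#{1,6}\s+/.test(t)) { i += 1; continue }
    break
  }
  return lines.slice(i).join('\n').trimStart()
}

console.log(stripTopHeadings('# 基本面分析\n\n## 摘要\n正文从这里开始'))
// -> '正文从这里开始'
// Every leading heading line is consumed (including the '## 摘要' subheading),
// not just the first one; stripping stops at the first non-heading text line.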
+      </Tabs>
+    </div>
+  )
+}

diff --git a/frontend/src/app/reports/page.tsx b/frontend/src/app/reports/page.tsx
index f912e21..4ba696f 100644
--- a/frontend/src/app/reports/page.tsx
+++ b/frontend/src/app/reports/page.tsx
@@ -1,48 +1,60 @@
-import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
-import { Badge } from "@/components/ui/badge";
+import Link from 'next/link'
+import { headers } from 'next/headers'
+
+async function fetchReports(baseUrl: string) {
+  const url = `${baseUrl}/api/reports?limit=50`
+  const resp = await fetch(url, { cache: 'no-store' })
+  if (!resp.ok) {
+    return { items: [], total: 0 }
+  }
+  return resp.json() as Promise<{ items: Array<{ id: string; symbol: string; createdAt: string; content?: any }>; total: number }>
+}
+
+export default async function ReportsPage() {
+  const h = await headers()
+  const host = h.get('x-forwarded-host') || h.get('host') || 'localhost:3000'
+  const proto = h.get('x-forwarded-proto') || 'http'
+  const base = process.env.NEXT_PUBLIC_BASE_URL || `${proto}://${host}`
+  const { items, total } = await fetchReports(base)

-export default function ReportsPage() {
   return (
-    <div>
-      <div>
-        <h1>报表中心</h1>
-        <p>查看与管理财务报表与分析结果。</p>
-      </div>
-
-      <div>
-        <Card>
-          <CardHeader>
-            <CardTitle>利润表</CardTitle>
-            <CardDescription>收入、成本、净利润</CardDescription>
-          </CardHeader>
-          <CardContent>
-            <Badge>季度</Badge>
-            <Badge>年度</Badge>
-          </CardContent>
-        </Card>
-        <Card>
-          <CardHeader>
-            <CardTitle>资产负债表</CardTitle>
-            <CardDescription>资产、负债、权益</CardDescription>
-          </CardHeader>
-          <CardContent>
-            <Badge>结构</Badge>
-            <Badge>趋势</Badge>
-          </CardContent>
-        </Card>
-        <Card>
-          <CardHeader>
-            <CardTitle>现金流量表</CardTitle>
-            <CardDescription>经营、投资、筹资</CardDescription>
-          </CardHeader>
-          <CardContent>
-            <Badge>自由现金流</Badge>
-            <Badge>质量</Badge>
-          </CardContent>
-        </Card>
-      </div>
+    <div>
+      <div>
+        <h1>历史分析报告</h1>
+        <div>共 {total} 条</div>
+      </div>
+
+      {items.length === 0 ? (
+        <div>暂无报告</div>
+      ) : (
+        <div>
+          <table>
+            <thead>
+              <tr>
+                <th>股票代码</th>
+                <th>公司名称</th>
+                <th>创建时间</th>
+                <th>操作</th>
+              </tr>
+            </thead>
+            <tbody>
+              {items.map((r) => {
+                const name = (r as any)?.content?.financials?.name || (r as any)?.content?.company_name || ''
+                return (
+                  <tr key={r.id}>
+                    <td>{r.symbol}</td>
+                    <td>{name || '-'}</td>
+                    <td>{new Date(r.createdAt).toLocaleString()}</td>
+                    <td>
+                      <Link href={`/reports/${r.id}`}>查看</Link>
+                    </td>
+                  </tr>
+                )
+              })}
+            </tbody>
+          </table>
+        </div>
+      )}
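Note: because this listing is a server component, it cannot rely on a browser origin; the code above rebuilds its own base URL from proxy headers before fetching. A minimal restatement of that derivation in isolation — the function name is introduced here for illustration; the header names are the standard `x-forwarded-*` values the page already reads:

import { headers } from 'next/headers'

// Derive the request's own origin on the server, preferring an explicit
// NEXT_PUBLIC_BASE_URL override, then proxy-forwarded values, then Host.
async function getBaseUrl(): Promise<string> {
  const h = await headers()
  const host = h.get('x-forwarded-host') || h.get('host') || 'localhost:3000'
  const proto = h.get('x-forwarded-proto') || 'http'
  return process.env.NEXT_PUBLIC_BASE_URL || `${proto}://${host}`
}

The `cache: 'no-store'` option on the fetch keeps the listing fresh on every request instead of letting Next.js cache it at build time.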
+    </div>
-  );
+  )
 }
\ No newline at end of file

diff --git a/frontend/src/lib/prisma.ts b/frontend/src/lib/prisma.ts
new file mode 100644
index 0000000..290ba6d
--- /dev/null
+++ b/frontend/src/lib/prisma.ts
@@ -0,0 +1,13 @@
+import { PrismaClient } from '@prisma/client'
+
+const globalForPrisma = global as unknown as { prisma?: PrismaClient }
+
+export const prisma =
+  globalForPrisma.prisma ||
+  new PrismaClient({
+    log: ['error', 'warn']
+  })
+
+if (process.env.NODE_ENV !== 'production') globalForPrisma.prisma = prisma
+
+
diff --git a/scripts/dev.sh b/scripts/dev.sh
index 399d4c4..9996284 100755
--- a/scripts/dev.sh
+++ b/scripts/dev.sh
@@ -13,9 +13,12 @@ BACKEND_DIR="$REPO_ROOT/backend"
 FRONTEND_DIR="$REPO_ROOT/frontend"
 CONFIG_FILE="$REPO_ROOT/config/config.json"
 
+# Guard to ensure cleanup runs only once
+__CLEANED_UP=0
+
 # Port configuration
 BACKEND_PORT=8000
-FRONTEND_PORT=3000
+FRONTEND_PORT=3001
 
 # Kill process using specified port
 kill_port() {
@@ -70,8 +73,10 @@ run_backend() {
   ensure_backend
   cd "$BACKEND_DIR"
   # Run and colorize output (avoid stdbuf on macOS)
-  UVICORN_CMD=(uvicorn app.main:app --reload --port "$BACKEND_PORT")
-  "${UVICORN_CMD[@]}" 2>&1 | awk -v p="[BACKEND]" -v color="$GREEN" -v reset="$RESET" '{print color p " " $0 reset}'
+  UVICORN_CMD=(uvicorn app.main:app --reload --port "$BACKEND_PORT" --log-level info)
+  "${UVICORN_CMD[@]}" 2>&1 | while IFS= read -r line; do
+    printf "%b[%s] [BACKEND] %s%b\n" "$GREEN" "$(date '+%Y-%m-%d %H:%M:%S')" "$line" "$RESET"
+  done
 }
 
 ensure_frontend() {
@@ -85,27 +90,70 @@ ensure_frontend() {
 run_frontend() {
   ensure_frontend
   cd "$FRONTEND_DIR"
-  npm run dev 2>&1 | awk -v p="[FRONTEND]" -v color="$CYAN" -v reset="$RESET" '{print color p " " $0 reset}'
+  npm run dev 2>&1 | while IFS= read -r line; do
+    printf "%b[%s] [FRONTEND] %s%b\n" "$CYAN" "$(date '+%Y-%m-%d %H:%M:%S')" "$line" "$RESET"
+  done
+}
+
+# Recursively kill a process tree (children first), with optional signal (default TERM)
+kill_tree() {
+  local pid="$1"
+  local signal="${2:-TERM}"
+  if [[ -z "${pid:-}" ]]; then
+    return
+  fi
+  # Kill children first
+  local children
+  children=$(pgrep -P "$pid" 2>/dev/null || true)
+  if [[ -n "${children:-}" ]]; then
+    for child in $children; do
+      kill_tree "$child" "$signal"
+    done
+  fi
+  # Then the parent
+  kill -"$signal" "$pid" 2>/dev/null || true
 }
 
 cleanup() {
+  # Ensure this runs only once even if multiple signals (INT/TERM/EXIT) arrive
+  if [[ $__CLEANED_UP -eq 1 ]]; then
+    return
+  fi
+  __CLEANED_UP=1
+
   echo -e "\n${YELLOW}[CLEANUP]${RESET} Stopping services..."
-
-  # Kill process groups to ensure all child processes are terminated
+
+  # Gracefully stop trees for backend and frontend, then escalate if needed
   if [[ -n "${BACKEND_PID:-}" ]]; then
-    kill -TERM -"$BACKEND_PID" 2>/dev/null || kill "$BACKEND_PID" 2>/dev/null || true
+    kill_tree "$BACKEND_PID" TERM
   fi
   if [[ -n "${FRONTEND_PID:-}" ]]; then
-    kill -TERM -"$FRONTEND_PID" 2>/dev/null || kill "$FRONTEND_PID" 2>/dev/null || true
+    kill_tree "$FRONTEND_PID" TERM
   fi
-
-  sleep 1
-
-  # Force kill any remaining processes on these ports
+
+  # Wait up to ~3s for graceful shutdown
+  for _ in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15; do
+    local backend_alive=0 frontend_alive=0
+    if [[ -n "${BACKEND_PID:-}" ]] && kill -0 "$BACKEND_PID" 2>/dev/null; then backend_alive=1; fi
+    if [[ -n "${FRONTEND_PID:-}" ]] && kill -0 "$FRONTEND_PID" 2>/dev/null; then frontend_alive=1; fi
+    if [[ $backend_alive -eq 0 && $frontend_alive -eq 0 ]]; then
+      break
+    fi
+    sleep 0.2
+  done
+
+  # Escalate to KILL if still alive
+  if [[ -n "${BACKEND_PID:-}" ]] && kill -0 "$BACKEND_PID" 2>/dev/null; then
+    kill_tree "$BACKEND_PID" KILL
+  fi
+  if [[ -n "${FRONTEND_PID:-}" ]] && kill -0 "$FRONTEND_PID" 2>/dev/null; then
+    kill_tree "$FRONTEND_PID" KILL
+  fi
+
+  # As a final safeguard, free the ports
  kill_port "$BACKEND_PORT"
  kill_port "$FRONTEND_PORT"
-
-  wait 2>/dev/null || true
+  echo -e "${GREEN}[CLEANUP]${RESET} All services stopped."
 }
@@ -116,8 +164,8 @@ main() {
   kill_port "$BACKEND_PORT"
   kill_port "$FRONTEND_PORT"
 
-  echo -e "${GREEN}[BACKEND]${RESET} API: http://127.0.0.1:$BACKEND_PORT"
-  echo -e "${CYAN}[FRONTEND]${RESET} APP: http://127.0.0.1:$FRONTEND_PORT\n"
+  echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] [BACKEND]${RESET} API: http://127.0.0.1:$BACKEND_PORT"
+  echo -e "${CYAN}[$(date '+%Y-%m-%d %H:%M:%S')] [FRONTEND]${RESET} APP: http://127.0.0.1:$FRONTEND_PORT\n"
 
   run_backend & BACKEND_PID=$!
   run_frontend & FRONTEND_PID=$!
diff --git a/scripts/test-employees.py b/scripts/test-employees.py
index 5caaa6d..51c29d7 100755
--- a/scripts/test-employees.py
+++ b/scripts/test-employees.py
@@ -10,7 +10,7 @@ import json
 # Add the project root to the Python path
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))
 
-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient
 
 
 async def test_employees_data():
diff --git a/scripts/test-holder-number.py b/scripts/test-holder-number.py
index 7011824..569c6c4 100755
--- a/scripts/test-holder-number.py
+++ b/scripts/test-holder-number.py
@@ -11,7 +11,7 @@ from datetime import datetime, timedelta
 # Add the project root to the Python path
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))
 
-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient
 
 
 async def test_holder_number_data():
diff --git a/scripts/test-holder-processing.py b/scripts/test-holder-processing.py
index c4865ec..80abb3e 100755
--- a/scripts/test-holder-processing.py
+++ b/scripts/test-holder-processing.py
@@ -11,7 +11,7 @@ from datetime import datetime, timedelta
 # Add the project root to the Python path
 sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'backend'))
 
-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient
 
 
 async def test_holder_num_processing():
diff --git a/scripts/test-tax-to-ebt.py b/scripts/test-tax-to-ebt.py
index ab504b1..946b39d 100644
--- a/scripts/test-tax-to-ebt.py
+++ b/scripts/test-tax-to-ebt.py
@@ -9,7 +9,7 @@ import json
 # Add the backend directory to the Python path
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "backend"))
 
-from app.services.tushare_client import TushareClient
+from tushare_legacy_client import TushareLegacyClient as TushareClient
 
 async def test_tax_to_ebt():
     # Read the config to get the token
diff --git a/scripts/tushare_legacy_client.py b/scripts/tushare_legacy_client.py
new file mode 100644
index 0000000..0e5fe8b
--- /dev/null
+++ b/scripts/tushare_legacy_client.py
@@ -0,0 +1,41 @@
+import sys
+import os
+import asyncio
+from typing import Any, Dict, List, Optional
+
+# Add backend to path to import TushareProvider
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "backend"))
+from app.data_providers.tushare import TushareProvider
+
+class TushareLegacyClient:
+    """
+    An adapter that mimics the old TushareClient for legacy scripts
+    but uses the new TushareProvider under the hood.
+    """
+    def __init__(self, token: str):
+        if not token:
+            raise ValueError("Token must be provided.")
+        self.provider = TushareProvider(token=token)
+
+    async def query(
+        self,
+        api_name: str,
+        params: Optional[Dict[str, Any]] = None,
+        fields: Optional[str] = None,  # Note: the new provider's _query may ignore fields
+    ) -> List[Dict[str, Any]]:
+        """
+        Mimics the old .query() method by delegating to the provider's internal _query method.
+        """
+        # _query is protected on the provider; the legacy scripts call it deliberately.
+        return await self.provider._query(api_name=api_name, params=params, fields=fields)
+
+    async def aclose(self):
+        """Close the provider's underlying HTTP client, releasing the session."""
+        if hasattr(self.provider, '_client') and self.provider._client:
+            await self.provider._client.aclose()
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        await self.aclose()