- Backend: Introduced new endpoints for LLM configuration retrieval and updates in `config.py`, allowing dynamic management of LLM provider settings. - Updated schemas to include `AlphaEngineConfig` for better integration with the new provider. - Frontend: Added state management for AlphaEngine API credentials in the configuration page, ensuring a seamless user experience. - Configuration files updated to reflect changes in LLM provider settings and API keys. BREAKING CHANGE: The default LLM provider has been changed from `new_api` to `alpha_engine`, requiring updates to existing configurations.
160 lines
6.6 KiB
Python
160 lines
6.6 KiB
Python
import json
import logging
import os
from typing import Dict, Optional

from fastapi import APIRouter, BackgroundTasks, HTTPException, Depends
|
|
|
|
# Lazy loader for DataManager
_dm = None


def get_dm():
    """Return the process-wide DataManager, creating it on first use.

    Falls back to an inert stub (every lookup yields an empty result) when
    the real DataManager cannot be imported, so callers never crash on a
    missing backend. Whichever instance is chosen — real or stub — is
    cached in the module-level ``_dm`` for the life of the process.
    """
    global _dm
    if _dm is None:
        try:
            from app.data_manager import data_manager as real_dm
            _dm = real_dm
        except Exception:
            # Real manager unavailable; substitute a do-nothing stand-in
            # that mirrors the two methods this module actually calls.
            class _StubDM:
                async def get_stock_basic(self, stock_code: str): return None
                async def get_financial_statements(self, stock_code: str, report_dates): return []
            _dm = _StubDM()
    return _dm
|
|
|
|
from app.services.analysis_client import AnalysisClient, load_analysis_config
|
|
from app.core.dependencies import get_config_manager
|
|
from app.services.config_manager import ConfigManager
|
|
from app.services.client_factory import create_analysis_client
|
|
|
|
# Router and logger shared by all endpoints in this module.
router = APIRouter()

logger = logging.getLogger(__name__)

# Constants for config paths.
# REPO_ROOT resolves to three directory levels above this module —
# presumably the repository root; TODO confirm against the package layout.
REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))

# Base JSON configuration file under <repo>/config/config.json.
BASE_CONFIG_PATH = os.path.join(REPO_ROOT, "config", "config.json")
|
|
|
|
def _load_json(path: str) -> Dict:
|
|
if not os.path.exists(path):
|
|
return {}
|
|
try:
|
|
with open(path, "r", encoding="utf-8") as f:
|
|
return json.load(f)
|
|
except Exception:
|
|
return {}
|
|
|
|
async def run_full_analysis(org_id: str, config_manager: Optional[ConfigManager] = None):
    """
    Asynchronous task to run a full analysis for a given stock.
    This function is market-agnostic and relies on DataManager.

    Args:
        org_id: Stock/organisation identifier to analyse.
        config_manager: Request-scoped ConfigManager. When None (e.g. when
            invoked as a detached background task), a fresh database session
            is opened and a new ConfigManager is built around it.
    """
    logger.info(f"Starting full analysis task for {org_id}")

    # 1. Load configurations
    if config_manager is None:
        # Background tasks run outside the request lifecycle, so open a
        # dedicated session that is closed when the analysis finishes.
        from app.core.database import AsyncSessionLocal
        async with AsyncSessionLocal() as session:
            await _run_analysis_with_config(org_id, ConfigManager(db_session=session))
    else:
        await _run_analysis_with_config(org_id, config_manager)
|
|
|
|
|
|
async def _run_analysis_with_config(org_id: str, config_manager: ConfigManager):
    """Internal function to run analysis with a ConfigManager instance.

    Pipeline: load the LLM and analysis-module configuration, fetch the
    company name and financial statements via DataManager, run each
    configured analysis module through an LLM client, and log the
    aggregated results (persistence is still a TODO).

    Args:
        org_id: Stock/organisation identifier to analyse.
        config_manager: Source of the active LLM provider configuration.
    """
    # 1. Load LLM configuration using ConfigManager
    llm_config_result = await config_manager.get_llm_config()
    default_provider = llm_config_result["provider"]
    default_config = llm_config_result["config"]
    global_model = llm_config_result.get("model")  # global model override; may be None

    # Module definitions come from the analysis config file; without them
    # there is nothing to run, so bail out early.
    analysis_config_full = load_analysis_config()
    modules_config = analysis_config_full.get("analysis_modules", {})
    if not modules_config:
        logger.error(f"Analysis modules configuration not found. Aborting analysis for {org_id}.")
        return

    # 2. Fetch basic company info (name); fall back to the raw org_id so a
    # lookup failure never aborts the whole analysis.
    try:
        basic_data = await get_dm().get_stock_basic(stock_code=org_id)
        company_name = basic_data.get("name", org_id) if basic_data else org_id
        logger.info(f"Got company name for {org_id}: {company_name}")
    except Exception as e:
        logger.warning(f"Failed to get company name for {org_id}. Using org_id as name. Error: {e}")
        company_name = org_id

    # 3. Fetch financial data (best-effort: analysis proceeds with
    # financial_data = None if this step fails).
    financial_data = None
    try:
        # You might want to make the date range configurable; currently the
        # last five completed annual reports (YYYY1231) are requested.
        from datetime import datetime
        current_year = datetime.now().year
        report_dates = [f"{year}1231" for year in range(current_year - 5, current_year)]

        financial_statements = await get_dm().get_financial_statements(stock_code=org_id, report_dates=report_dates)
        if financial_statements:
            financial_data = {"series": financial_statements}
            logger.info(f"Successfully fetched financial statements for {org_id}")
        else:
            logger.warning(f"Could not fetch financial statements for {org_id}")
    except Exception as e:
        logger.error(f"Error fetching financial data for {org_id}: {e}")

    # 4. Execute analysis modules in order (simplified, assumes no complex dependencies for now)
    # Note: A full implementation would need the topological sort from the financial router.
    analysis_results = {}
    for module_type, module_config in modules_config.items():
        logger.info(f"Running analysis module: {module_type} for {org_id}")

        # Use the global configuration uniformly; provider and model are no
        # longer read from the per-module config.
        model = global_model or default_config.get("model", "gemini-1.5-flash")

        # Create client using factory with global config
        client = create_analysis_client(
            provider=default_provider,
            config=default_config,
            model=model
        )

        # Simplified context: use results from all previously completed modules
        context = analysis_results.copy()

        result = await client.generate_analysis(
            analysis_type=module_type,
            company_name=company_name,
            ts_code=org_id,
            prompt_template=module_config.get("prompt_template", ""),
            financial_data=financial_data,
            context=context,
        )

        if result.get("success"):
            analysis_results[module_type] = result.get("content", "")
            logger.info(f"Module {module_type} for {org_id} completed successfully.")
        else:
            logger.error(f"Module {module_type} for {org_id} failed: {result.get('error')}")
            # Store error message to avoid breaking dependencies that might handle missing data
            analysis_results[module_type] = f"Error: Analysis for {module_type} failed."

    # 5. Save the final report
    # TODO: Implement database logic to save the `analysis_results` to the report record.
    logger.info(f"Full analysis for {org_id} finished. Results: {json.dumps(analysis_results, indent=2, ensure_ascii=False)}")
|
|
|
|
|
|
@router.post("/{market}/{org_id}/reports/generate")
async def trigger_report_generation(market: str, org_id: str, background_tasks: BackgroundTasks, config_manager: ConfigManager = Depends(get_config_manager)):
    """Queue a background job that generates a full financial report.

    Market-agnostic: the path's *market* segment is only echoed back in the
    response; the analysis itself resolves data through DataManager.
    Returns immediately with a queued acknowledgement.
    """
    logger.info(f"Received report generation request for {org_id} in {market} market.")

    # TODO: Create a report record in the database with "generating" status here.

    # Hand the request-scoped ConfigManager to the background job so it
    # reuses the same configuration source as this request.
    background_tasks.add_task(run_full_analysis, org_id, config_manager)
    logger.info(f"Queued analysis task for {org_id}.")

    return {"queued": True, "market": market, "org_id": org_id}
|